/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter <clameter@sgi.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/gfp.h>
#include <linux/string.h>
#include <linux/bitops.h>
/*
 * Tree geometry.  Kernel builds use a 64-way fanout; the userspace test
 * harness uses a narrow 8-way fanout so that deep trees are exercised with
 * small indices.  (The #ifdef guard was lost in extraction; without it the
 * second #define silently redefines the first.)
 */
#ifdef __KERNEL__
#define RADIX_TREE_MAP_SHIFT	6
#else
#define RADIX_TREE_MAP_SHIFT	3	/* For more stressful testing */
#endif

#define RADIX_TREE_TAGS		2	/* independent tag bits per slot */

#define RADIX_TREE_MAP_SIZE	(1UL << RADIX_TREE_MAP_SHIFT)
#define RADIX_TREE_MAP_MASK	(RADIX_TREE_MAP_SIZE-1)

/* Number of unsigned longs needed for one per-node tag bitmap */
#define RADIX_TREE_TAG_LONGS	\
	((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG)
48 struct radix_tree_node
{
50 void *slots
[RADIX_TREE_MAP_SIZE
];
51 unsigned long tags
[RADIX_TREE_TAGS
][RADIX_TREE_TAG_LONGS
];
/*
 * Records one step of a root-to-leaf descent so that delete/tag-clear can
 * walk back up the tree.
 * NOTE(review): the @offset member was missing from the extracted text but is
 * used as pathp->offset elsewhere in this file — restored; verify upstream.
 */
struct radix_tree_path {
	struct radix_tree_node *node;
	int offset;		/* slot index taken within @node */
};
59 #define RADIX_TREE_INDEX_BITS (8 /* CHAR_BIT */ * sizeof(unsigned long))
60 #define RADIX_TREE_MAX_PATH (RADIX_TREE_INDEX_BITS/RADIX_TREE_MAP_SHIFT + 2)
62 static unsigned long height_to_maxindex
[RADIX_TREE_MAX_PATH
] __read_mostly
;
65 * Radix tree node cache.
67 static kmem_cache_t
*radix_tree_node_cachep
;
70 * Per-cpu pool of preloaded nodes
72 struct radix_tree_preload
{
74 struct radix_tree_node
*nodes
[RADIX_TREE_MAX_PATH
];
76 DEFINE_PER_CPU(struct radix_tree_preload
, radix_tree_preloads
) = { 0, };
79 * This assumes that the caller has performed appropriate preallocation, and
80 * that the caller has pinned this thread of control to the current CPU.
82 static struct radix_tree_node
*
83 radix_tree_node_alloc(struct radix_tree_root
*root
)
85 struct radix_tree_node
*ret
;
87 ret
= kmem_cache_alloc(radix_tree_node_cachep
, root
->gfp_mask
);
88 if (ret
== NULL
&& !(root
->gfp_mask
& __GFP_WAIT
)) {
89 struct radix_tree_preload
*rtp
;
91 rtp
= &__get_cpu_var(radix_tree_preloads
);
93 ret
= rtp
->nodes
[rtp
->nr
- 1];
94 rtp
->nodes
[rtp
->nr
- 1] = NULL
;
102 radix_tree_node_free(struct radix_tree_node
*node
)
104 kmem_cache_free(radix_tree_node_cachep
, node
);
108 * Load up this CPU's radix_tree_node buffer with sufficient objects to
109 * ensure that the addition of a single element in the tree cannot fail. On
110 * success, return zero, with preemption disabled. On error, return -ENOMEM
111 * with preemption not disabled.
113 int radix_tree_preload(int gfp_mask
)
115 struct radix_tree_preload
*rtp
;
116 struct radix_tree_node
*node
;
120 rtp
= &__get_cpu_var(radix_tree_preloads
);
121 while (rtp
->nr
< ARRAY_SIZE(rtp
->nodes
)) {
123 node
= kmem_cache_alloc(radix_tree_node_cachep
, gfp_mask
);
127 rtp
= &__get_cpu_var(radix_tree_preloads
);
128 if (rtp
->nr
< ARRAY_SIZE(rtp
->nodes
))
129 rtp
->nodes
[rtp
->nr
++] = node
;
131 kmem_cache_free(radix_tree_node_cachep
, node
);
138 static inline void tag_set(struct radix_tree_node
*node
, int tag
, int offset
)
140 if (!test_bit(offset
, &node
->tags
[tag
][0]))
141 __set_bit(offset
, &node
->tags
[tag
][0]);
144 static inline void tag_clear(struct radix_tree_node
*node
, int tag
, int offset
)
146 __clear_bit(offset
, &node
->tags
[tag
][0]);
149 static inline int tag_get(struct radix_tree_node
*node
, int tag
, int offset
)
151 return test_bit(offset
, &node
->tags
[tag
][0]);
155 * Return the maximum key which can be store into a
156 * radix tree with height HEIGHT.
158 static inline unsigned long radix_tree_maxindex(unsigned int height
)
160 return height_to_maxindex
[height
];
164 * Extend a radix tree so it can store key @index.
166 static int radix_tree_extend(struct radix_tree_root
*root
, unsigned long index
)
168 struct radix_tree_node
*node
;
170 char tags
[RADIX_TREE_TAGS
];
173 /* Figure out what the height should be. */
174 height
= root
->height
+ 1;
175 while (index
> radix_tree_maxindex(height
))
178 if (root
->rnode
== NULL
) {
179 root
->height
= height
;
184 * Prepare the tag status of the top-level node for propagation
185 * into the newly-pushed top-level node(s)
187 for (tag
= 0; tag
< RADIX_TREE_TAGS
; tag
++) {
191 for (idx
= 0; idx
< RADIX_TREE_TAG_LONGS
; idx
++) {
192 if (root
->rnode
->tags
[tag
][idx
]) {
200 if (!(node
= radix_tree_node_alloc(root
)))
203 /* Increase the height. */
204 node
->slots
[0] = root
->rnode
;
206 /* Propagate the aggregated tag info into the new root */
207 for (tag
= 0; tag
< RADIX_TREE_TAGS
; tag
++) {
209 tag_set(node
, tag
, 0);
215 } while (height
> root
->height
);
221 * radix_tree_insert - insert into a radix tree
222 * @root: radix tree root
224 * @item: item to insert
226 * Insert an item into the radix tree at position @index.
228 int radix_tree_insert(struct radix_tree_root
*root
,
229 unsigned long index
, void *item
)
231 struct radix_tree_node
*node
= NULL
, *slot
;
232 unsigned int height
, shift
;
236 /* Make sure the tree is high enough. */
237 if ((!index
&& !root
->rnode
) ||
238 index
> radix_tree_maxindex(root
->height
)) {
239 error
= radix_tree_extend(root
, index
);
245 height
= root
->height
;
246 shift
= (height
-1) * RADIX_TREE_MAP_SHIFT
;
248 offset
= 0; /* uninitialised var warning */
251 /* Have to add a child node. */
252 if (!(slot
= radix_tree_node_alloc(root
)))
255 node
->slots
[offset
] = slot
;
261 /* Go a level down */
262 offset
= (index
>> shift
) & RADIX_TREE_MAP_MASK
;
264 slot
= node
->slots
[offset
];
265 shift
-= RADIX_TREE_MAP_SHIFT
;
274 node
->slots
[offset
] = item
;
275 BUG_ON(tag_get(node
, 0, offset
));
276 BUG_ON(tag_get(node
, 1, offset
));
282 EXPORT_SYMBOL(radix_tree_insert
);
285 * radix_tree_lookup - perform lookup operation on a radix tree
286 * @root: radix tree root
289 * Lookup the item at the position @index in the radix tree @root.
291 void *radix_tree_lookup(struct radix_tree_root
*root
, unsigned long index
)
293 unsigned int height
, shift
;
294 struct radix_tree_node
*slot
;
296 height
= root
->height
;
297 if (index
> radix_tree_maxindex(height
))
300 shift
= (height
-1) * RADIX_TREE_MAP_SHIFT
;
307 slot
= slot
->slots
[(index
>> shift
) & RADIX_TREE_MAP_MASK
];
308 shift
-= RADIX_TREE_MAP_SHIFT
;
314 EXPORT_SYMBOL(radix_tree_lookup
);
317 * radix_tree_tag_set - set a tag on a radix tree node
318 * @root: radix tree root
322 * Set the search tag corresponging to @index in the radix tree. From
323 * the root all the way down to the leaf node.
325 * Returns the address of the tagged item. Setting a tag on a not-present
328 void *radix_tree_tag_set(struct radix_tree_root
*root
,
329 unsigned long index
, int tag
)
331 unsigned int height
, shift
;
332 struct radix_tree_node
*slot
;
334 height
= root
->height
;
335 if (index
> radix_tree_maxindex(height
))
338 shift
= (height
- 1) * RADIX_TREE_MAP_SHIFT
;
344 offset
= (index
>> shift
) & RADIX_TREE_MAP_MASK
;
345 tag_set(slot
, tag
, offset
);
346 slot
= slot
->slots
[offset
];
347 BUG_ON(slot
== NULL
);
348 shift
-= RADIX_TREE_MAP_SHIFT
;
354 EXPORT_SYMBOL(radix_tree_tag_set
);
357 * radix_tree_tag_clear - clear a tag on a radix tree node
358 * @root: radix tree root
362 * Clear the search tag corresponging to @index in the radix tree. If
363 * this causes the leaf node to have no tags set then clear the tag in the
364 * next-to-leaf node, etc.
366 * Returns the address of the tagged item on success, else NULL. ie:
367 * has the same return value and semantics as radix_tree_lookup().
369 void *radix_tree_tag_clear(struct radix_tree_root
*root
,
370 unsigned long index
, int tag
)
372 struct radix_tree_path path
[RADIX_TREE_MAX_PATH
], *pathp
= path
;
373 struct radix_tree_node
*slot
;
374 unsigned int height
, shift
;
377 height
= root
->height
;
378 if (index
> radix_tree_maxindex(height
))
381 shift
= (height
- 1) * RADIX_TREE_MAP_SHIFT
;
391 offset
= (index
>> shift
) & RADIX_TREE_MAP_MASK
;
392 pathp
[1].offset
= offset
;
393 pathp
[1].node
= slot
;
394 slot
= slot
->slots
[offset
];
396 shift
-= RADIX_TREE_MAP_SHIFT
;
407 tag_clear(pathp
->node
, tag
, pathp
->offset
);
408 for (idx
= 0; idx
< RADIX_TREE_TAG_LONGS
; idx
++) {
409 if (pathp
->node
->tags
[tag
][idx
])
413 } while (pathp
->node
);
417 EXPORT_SYMBOL(radix_tree_tag_clear
);
419 #ifndef __KERNEL__ /* Only the test harness uses this at present */
421 * radix_tree_tag_get - get a tag on a radix tree node
422 * @root: radix tree root
426 * Return the search tag corresponging to @index in the radix tree.
428 * Returns zero if the tag is unset, or if there is no corresponding item
431 int radix_tree_tag_get(struct radix_tree_root
*root
,
432 unsigned long index
, int tag
)
434 unsigned int height
, shift
;
435 struct radix_tree_node
*slot
;
436 int saw_unset_tag
= 0;
438 height
= root
->height
;
439 if (index
> radix_tree_maxindex(height
))
442 shift
= (height
- 1) * RADIX_TREE_MAP_SHIFT
;
451 offset
= (index
>> shift
) & RADIX_TREE_MAP_MASK
;
454 * This is just a debug check. Later, we can bale as soon as
455 * we see an unset tag.
457 if (!tag_get(slot
, tag
, offset
))
460 int ret
= tag_get(slot
, tag
, offset
);
462 BUG_ON(ret
&& saw_unset_tag
);
465 slot
= slot
->slots
[offset
];
466 shift
-= RADIX_TREE_MAP_SHIFT
;
470 EXPORT_SYMBOL(radix_tree_tag_get
);
474 __lookup(struct radix_tree_root
*root
, void **results
, unsigned long index
,
475 unsigned int max_items
, unsigned long *next_index
)
477 unsigned int nr_found
= 0;
478 unsigned int shift
, height
;
479 struct radix_tree_node
*slot
;
482 height
= root
->height
;
486 shift
= (height
-1) * RADIX_TREE_MAP_SHIFT
;
489 for ( ; height
> 1; height
--) {
491 for (i
= (index
>> shift
) & RADIX_TREE_MAP_MASK
;
492 i
< RADIX_TREE_MAP_SIZE
; i
++) {
493 if (slot
->slots
[i
] != NULL
)
495 index
&= ~((1UL << shift
) - 1);
496 index
+= 1UL << shift
;
498 goto out
; /* 32-bit wraparound */
500 if (i
== RADIX_TREE_MAP_SIZE
)
503 shift
-= RADIX_TREE_MAP_SHIFT
;
504 slot
= slot
->slots
[i
];
507 /* Bottom level: grab some items */
508 for (i
= index
& RADIX_TREE_MAP_MASK
; i
< RADIX_TREE_MAP_SIZE
; i
++) {
510 if (slot
->slots
[i
]) {
511 results
[nr_found
++] = slot
->slots
[i
];
512 if (nr_found
== max_items
)
522 * radix_tree_gang_lookup - perform multiple lookup on a radix tree
523 * @root: radix tree root
524 * @results: where the results of the lookup are placed
525 * @first_index: start the lookup from this key
526 * @max_items: place up to this many items at *results
528 * Performs an index-ascending scan of the tree for present items. Places
529 * them at *@results and returns the number of items which were placed at
532 * The implementation is naive.
535 radix_tree_gang_lookup(struct radix_tree_root
*root
, void **results
,
536 unsigned long first_index
, unsigned int max_items
)
538 const unsigned long max_index
= radix_tree_maxindex(root
->height
);
539 unsigned long cur_index
= first_index
;
540 unsigned int ret
= 0;
542 while (ret
< max_items
) {
543 unsigned int nr_found
;
544 unsigned long next_index
; /* Index of next search */
546 if (cur_index
> max_index
)
548 nr_found
= __lookup(root
, results
+ ret
, cur_index
,
549 max_items
- ret
, &next_index
);
553 cur_index
= next_index
;
557 EXPORT_SYMBOL(radix_tree_gang_lookup
);
560 * FIXME: the two tag_get()s here should use find_next_bit() instead of
561 * open-coding the search.
564 __lookup_tag(struct radix_tree_root
*root
, void **results
, unsigned long index
,
565 unsigned int max_items
, unsigned long *next_index
, int tag
)
567 unsigned int nr_found
= 0;
569 unsigned int height
= root
->height
;
570 struct radix_tree_node
*slot
;
572 shift
= (height
- 1) * RADIX_TREE_MAP_SHIFT
;
576 unsigned long i
= (index
>> shift
) & RADIX_TREE_MAP_MASK
;
578 for ( ; i
< RADIX_TREE_MAP_SIZE
; i
++) {
579 if (tag_get(slot
, tag
, i
)) {
580 BUG_ON(slot
->slots
[i
] == NULL
);
583 index
&= ~((1UL << shift
) - 1);
584 index
+= 1UL << shift
;
586 goto out
; /* 32-bit wraparound */
588 if (i
== RADIX_TREE_MAP_SIZE
)
591 if (height
== 0) { /* Bottom level: grab some items */
592 unsigned long j
= index
& RADIX_TREE_MAP_MASK
;
594 for ( ; j
< RADIX_TREE_MAP_SIZE
; j
++) {
596 if (tag_get(slot
, tag
, j
)) {
597 BUG_ON(slot
->slots
[j
] == NULL
);
598 results
[nr_found
++] = slot
->slots
[j
];
599 if (nr_found
== max_items
)
604 shift
-= RADIX_TREE_MAP_SHIFT
;
605 slot
= slot
->slots
[i
];
613 * radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
615 * @root: radix tree root
616 * @results: where the results of the lookup are placed
617 * @first_index: start the lookup from this key
618 * @max_items: place up to this many items at *results
619 * @tag: the tag index
621 * Performs an index-ascending scan of the tree for present items which
622 * have the tag indexed by @tag set. Places the items at *@results and
623 * returns the number of items which were placed at *@results.
626 radix_tree_gang_lookup_tag(struct radix_tree_root
*root
, void **results
,
627 unsigned long first_index
, unsigned int max_items
, int tag
)
629 const unsigned long max_index
= radix_tree_maxindex(root
->height
);
630 unsigned long cur_index
= first_index
;
631 unsigned int ret
= 0;
633 while (ret
< max_items
) {
634 unsigned int nr_found
;
635 unsigned long next_index
; /* Index of next search */
637 if (cur_index
> max_index
)
639 nr_found
= __lookup_tag(root
, results
+ ret
, cur_index
,
640 max_items
- ret
, &next_index
, tag
);
644 cur_index
= next_index
;
648 EXPORT_SYMBOL(radix_tree_gang_lookup_tag
);
651 * radix_tree_delete - delete an item from a radix tree
652 * @root: radix tree root
655 * Remove the item at @index from the radix tree rooted at @root.
657 * Returns the address of the deleted item, or NULL if it was not present.
659 void *radix_tree_delete(struct radix_tree_root
*root
, unsigned long index
)
661 struct radix_tree_path path
[RADIX_TREE_MAX_PATH
], *pathp
= path
;
662 struct radix_tree_path
*orig_pathp
;
663 struct radix_tree_node
*slot
;
664 unsigned int height
, shift
;
666 char tags
[RADIX_TREE_TAGS
];
669 height
= root
->height
;
670 if (index
> radix_tree_maxindex(height
))
673 shift
= (height
- 1) * RADIX_TREE_MAP_SHIFT
;
677 for ( ; height
> 0; height
--) {
683 offset
= (index
>> shift
) & RADIX_TREE_MAP_MASK
;
684 pathp
[1].offset
= offset
;
685 pathp
[1].node
= slot
;
686 slot
= slot
->slots
[offset
];
688 shift
-= RADIX_TREE_MAP_SHIFT
;
698 * Clear all tags associated with the just-deleted item
700 memset(tags
, 0, sizeof(tags
));
704 nr_cleared_tags
= RADIX_TREE_TAGS
;
705 for (tag
= 0; tag
< RADIX_TREE_TAGS
; tag
++) {
711 tag_clear(pathp
->node
, tag
, pathp
->offset
);
713 for (idx
= 0; idx
< RADIX_TREE_TAG_LONGS
; idx
++) {
714 if (pathp
->node
->tags
[tag
][idx
]) {
722 } while (pathp
->node
&& nr_cleared_tags
);
724 /* Now free the nodes we do not need anymore */
725 for (pathp
= orig_pathp
; pathp
->node
; pathp
--) {
726 pathp
->node
->slots
[pathp
->offset
] = NULL
;
727 if (--pathp
->node
->count
)
730 /* Node with zero slots in use so free it */
731 radix_tree_node_free(pathp
->node
);
738 EXPORT_SYMBOL(radix_tree_delete
);
741 * radix_tree_tagged - test whether any items in the tree are tagged
742 * @root: radix tree root
745 int radix_tree_tagged(struct radix_tree_root
*root
, int tag
)
751 for (idx
= 0; idx
< RADIX_TREE_TAG_LONGS
; idx
++) {
752 if (root
->rnode
->tags
[tag
][idx
])
757 EXPORT_SYMBOL(radix_tree_tagged
);
760 radix_tree_node_ctor(void *node
, kmem_cache_t
*cachep
, unsigned long flags
)
762 memset(node
, 0, sizeof(struct radix_tree_node
));
765 static __init
unsigned long __maxindex(unsigned int height
)
767 unsigned int tmp
= height
* RADIX_TREE_MAP_SHIFT
;
768 unsigned long index
= (~0UL >> (RADIX_TREE_INDEX_BITS
- tmp
- 1)) >> 1;
770 if (tmp
>= RADIX_TREE_INDEX_BITS
)
775 static __init
void radix_tree_init_maxindex(void)
779 for (i
= 0; i
< ARRAY_SIZE(height_to_maxindex
); i
++)
780 height_to_maxindex
[i
] = __maxindex(i
);
#ifdef CONFIG_HOTPLUG_CPU
/*
 * CPU-hotplug notifier: when a CPU dies, drain its preload pool back to the
 * slab cache so the banked nodes are not leaked.
 * NOTE(review): the drain loop frame and return were lost in extraction and
 * reconstructed — verify against upstream.
 */
static int radix_tree_callback(struct notifier_block *nfb,
                            unsigned long action,
                            void *hcpu)
{
       int cpu = (long)hcpu;
       struct radix_tree_preload *rtp;

       /* Free per-cpu pool of preloaded nodes */
       if (action == CPU_DEAD) {
               rtp = &per_cpu(radix_tree_preloads, cpu);
               while (rtp->nr) {
                       kmem_cache_free(radix_tree_node_cachep,
                                       rtp->nodes[rtp->nr-1]);
                       rtp->nodes[rtp->nr-1] = NULL;
                       rtp->nr--;
               }
       }
       return NOTIFY_OK;
}
#endif /* CONFIG_HOTPLUG_CPU */
805 void __init
radix_tree_init(void)
807 radix_tree_node_cachep
= kmem_cache_create("radix_tree_node",
808 sizeof(struct radix_tree_node
), 0,
809 SLAB_PANIC
, radix_tree_node_ctor
, NULL
);
810 radix_tree_init_maxindex();
811 hotcpu_notifier(radix_tree_callback
, 0);