/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter <clameter@sgi.com>
 * Copyright (C) 2006 Nick Piggin
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/gfp.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>

#ifdef __KERNEL__
#define RADIX_TREE_MAP_SHIFT	(CONFIG_BASE_SMALL ? 4 : 6)
#else
#define RADIX_TREE_MAP_SHIFT	3	/* For more stressful testing */
#endif

#define RADIX_TREE_MAP_SIZE	(1UL << RADIX_TREE_MAP_SHIFT)
#define RADIX_TREE_MAP_MASK	(RADIX_TREE_MAP_SIZE-1)

#define RADIX_TREE_TAG_LONGS	\
	((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG)

struct radix_tree_node {
	unsigned int	height;		/* Height from the bottom */
	unsigned int	count;
	struct rcu_head	rcu_head;
	void		*slots[RADIX_TREE_MAP_SIZE];
	unsigned long	tags[RADIX_TREE_MAX_TAGS][RADIX_TREE_TAG_LONGS];
};

struct radix_tree_path {
	struct radix_tree_node *node;
	int offset;
};

#define RADIX_TREE_INDEX_BITS  (8 /* CHAR_BIT */ * sizeof(unsigned long))
#define RADIX_TREE_MAX_PATH (RADIX_TREE_INDEX_BITS/RADIX_TREE_MAP_SHIFT + 2)
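
/*
 * Worked example (added for illustration, not part of the original file):
 * with the kernel default RADIX_TREE_MAP_SHIFT of 6 on a 64-bit machine,
 * RADIX_TREE_MAP_SIZE is 64 slots per node, RADIX_TREE_TAG_LONGS is
 * (64 + 63) / 64 = 1 long of tag bits per tag, and RADIX_TREE_MAX_PATH is
 * 64/6 + 2 = 12, i.e. enough radix_tree_path entries to record a walk from
 * the root down to the deepest possible leaf.
 */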

static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;

/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	int nr;
	struct radix_tree_node *nodes[RADIX_TREE_MAX_PATH];
};
DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };

static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}

/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret;
	gfp_t gfp_mask = root_gfp_mask(root);

	ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
	if (ret == NULL && !(gfp_mask & __GFP_WAIT)) {
		struct radix_tree_preload *rtp;

		rtp = &__get_cpu_var(radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes[rtp->nr - 1];
			rtp->nodes[rtp->nr - 1] = NULL;
			rtp->nr--;
		}
	}
	BUG_ON(radix_tree_is_direct_ptr(ret));
	return ret;
}

static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);
	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	preempt_disable();
	rtp = &__get_cpu_var(radix_tree_preloads);
	while (rtp->nr < ARRAY_SIZE(rtp->nodes)) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = &__get_cpu_var(radix_tree_preloads);
		if (rtp->nr < ARRAY_SIZE(rtp->nodes))
			rtp->nodes[rtp->nr++] = node;
		else
			kmem_cache_free(radix_tree_node_cachep, node);
	}
	ret = 0;
out:
	return ret;
}
EXPORT_SYMBOL(radix_tree_preload);
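
/*
 * Example (an illustrative sketch added for clarity, not part of the
 * original file; the tree, lock, index and item names are hypothetical):
 * preloading outside the tree lock lets radix_tree_node_alloc() fall back
 * to the per-cpu pool when the root's gfp mask does not allow sleeping.
 * radix_tree_preload_end(), declared in <linux/radix-tree.h>, re-enables
 * the preemption that a successful preload left disabled.
 *
 *	if (radix_tree_preload(GFP_KERNEL))
 *		return -ENOMEM;
 *	spin_lock(&my_lock);
 *	error = radix_tree_insert(&my_tree, index, item);
 *	spin_unlock(&my_lock);
 *	radix_tree_preload_end();
 */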

static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}

static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}
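
/*
 * Note (added for illustration; layout sketch only): the per-root tag bits
 * are packed into the otherwise unused high bits of root->gfp_mask, above
 * __GFP_BITS_SHIFT, which is why root_gfp_mask() masks with __GFP_BITS_MASK
 * before the value is used for allocations:
 *
 *	bit:	__GFP_BITS_SHIFT+1  __GFP_BITS_SHIFT  __GFP_BITS_SHIFT-1 .. 0
 *		[     tag 1      ] [     tag 0     ] [ __GFP_* allocation flags ]
 */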

/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	int idx;

	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}

/*
 * Return the maximum key which can be stored in a
 * radix tree with height HEIGHT.
 */
static inline unsigned long radix_tree_maxindex(unsigned int height)
{
	return height_to_maxindex[height];
}

/*
 * Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root, unsigned long index)
{
	struct radix_tree_node *node;
	unsigned int height;
	int tag;

	/* Figure out what the height should be. */
	height = root->height + 1;
	while (index > radix_tree_maxindex(height))
		height++;

	if (root->rnode == NULL) {
		root->height = height;
		goto out;
	}

	do {
		unsigned int newheight;
		if (!(node = radix_tree_node_alloc(root)))
			return -ENOMEM;

		/* Increase the height. */
		node->slots[0] = radix_tree_direct_to_ptr(root->rnode);

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		newheight = root->height+1;
		node->height = newheight;
		node->count = 1;
		rcu_assign_pointer(root->rnode, node);
		root->height = newheight;
	} while (height > root->height);
out:
	return 0;
}

/**
 * radix_tree_insert - insert into a radix tree
 * @root: radix tree root
 * @index: index key
 * @item: item to insert
 *
 * Insert an item into the radix tree at position @index.
 */
int radix_tree_insert(struct radix_tree_root *root,
			unsigned long index, void *item)
{
	struct radix_tree_node *node = NULL, *slot;
	unsigned int height, shift;
	int offset;
	int error;

	BUG_ON(radix_tree_is_direct_ptr(item));

	/* Make sure the tree is high enough. */
	if (index > radix_tree_maxindex(root->height)) {
		error = radix_tree_extend(root, index);
		if (error)
			return error;
	}

	slot = root->rnode;
	height = root->height;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	offset = 0;			/* uninitialised var warning */
	while (height > 0) {
		if (slot == NULL) {
			/* Have to add a child node. */
			if (!(slot = radix_tree_node_alloc(root)))
				return -ENOMEM;
			slot->height = height;
			if (node) {
				rcu_assign_pointer(node->slots[offset], slot);
				node->count++;
			} else
				rcu_assign_pointer(root->rnode, slot);
		}

		/* Go a level down */
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = node->slots[offset];
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	if (slot != NULL)
		return -EEXIST;

	if (node) {
		node->count++;
		rcu_assign_pointer(node->slots[offset], item);
		BUG_ON(tag_get(node, 0, offset));
		BUG_ON(tag_get(node, 1, offset));
	} else {
		rcu_assign_pointer(root->rnode, radix_tree_ptr_to_direct(item));
		BUG_ON(root_tag_get(root, 0));
		BUG_ON(root_tag_get(root, 1));
	}

	return 0;
}
EXPORT_SYMBOL(radix_tree_insert);
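
/*
 * Example (illustrative sketch added for clarity; the tree, lock and item
 * names are hypothetical): inserting without a prior preload, relying on
 * an atomic gfp mask stored in the root, and checking both error returns.
 *
 *	RADIX_TREE(my_tree, GFP_ATOMIC);
 *	...
 *	spin_lock(&my_lock);
 *	error = radix_tree_insert(&my_tree, index, item);
 *	spin_unlock(&my_lock);
 *
 * error is 0 on success, -EEXIST if an item already occupies that index,
 * and -ENOMEM if a node could not be allocated.
 */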

/**
 * radix_tree_lookup_slot - lookup a slot in a radix tree
 * @root: radix tree root
 * @index: index key
 *
 * Returns: the slot corresponding to the position @index in the
 * radix tree @root. This is useful for update-if-exists operations.
 *
 * This function cannot be called under rcu_read_lock, it must be
 * excluded from writers, as must the returned slot for subsequent
 * use by radix_tree_deref_slot() and radix_tree_replace_slot().
 * Caller must hold the tree write locked across the slot lookup and
 * replacement.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	unsigned int height, shift;
	struct radix_tree_node *node, **slot;

	node = root->rnode;
	if (node == NULL)
		return NULL;

	if (radix_tree_is_direct_ptr(node)) {
		if (index > 0)
			return NULL;
		return (void **)&root->rnode;
	}

	height = node->height;
	if (index > radix_tree_maxindex(height))
		return NULL;

	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	do {
		slot = (struct radix_tree_node **)
			(node->slots + ((index>>shift) & RADIX_TREE_MAP_MASK));
		node = *slot;
		if (node == NULL)
			return NULL;

		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	return (void **)slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
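
/*
 * Example (illustrative sketch added for clarity; the lock, tree and item
 * names are hypothetical): an update-if-exists operation.  As required
 * above, the tree is kept write locked for as long as the slot is used.
 *
 *	spin_lock(&my_lock);
 *	slot = radix_tree_lookup_slot(&my_tree, index);
 *	if (slot)
 *		radix_tree_replace_slot(slot, new_item);
 *	spin_unlock(&my_lock);
 */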

/**
 * radix_tree_lookup - perform lookup operation on a radix tree
 * @root: radix tree root
 * @index: index key
 *
 * Lookup the item at the position @index in the radix tree @root.
 *
 * This function can be called under rcu_read_lock, however the caller
 * must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 * them safely). No RCU barriers are required to access or modify the
 * returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	unsigned int height, shift;
	struct radix_tree_node *node, **slot;

	node = rcu_dereference(root->rnode);
	if (node == NULL)
		return NULL;

	if (radix_tree_is_direct_ptr(node)) {
		if (index > 0)
			return NULL;
		return radix_tree_direct_to_ptr(node);
	}

	height = node->height;
	if (index > radix_tree_maxindex(height))
		return NULL;

	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	do {
		slot = (struct radix_tree_node **)
			(node->slots + ((index>>shift) & RADIX_TREE_MAP_MASK));
		node = rcu_dereference(*slot);
		if (node == NULL)
			return NULL;

		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	return node;
}
EXPORT_SYMBOL(radix_tree_lookup);
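
/*
 * Example (illustrative sketch added for clarity; do_something() and the
 * tree name are hypothetical, and the stored items are assumed to be freed
 * via RCU as the comment above requires): a lockless read-side lookup.
 *
 *	rcu_read_lock();
 *	item = radix_tree_lookup(&my_tree, index);
 *	if (item)
 *		do_something(item);
 *	rcu_read_unlock();
 *
 * The item is only guaranteed to remain allocated while the RCU read-side
 * critical section is held.
 */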

/**
 * radix_tree_tag_set - set a tag on a radix tree node
 * @root: radix tree root
 * @index: index key
 * @tag: tag index
 *
 * Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 * corresponding to @index in the radix tree, from
 * the root all the way down to the leaf node.
 *
 * Returns the address of the tagged item.  Setting a tag on a not-present
 * item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *slot;

	height = root->height;
	BUG_ON(index > radix_tree_maxindex(height));

	slot = root->rnode;
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	while (height > 0) {
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(slot, tag, offset))
			tag_set(slot, tag, offset);
		slot = slot->slots[offset];
		BUG_ON(slot == NULL);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	/* set the root's tag bit */
	if (slot && !root_tag_get(root, tag))
		root_tag_set(root, tag);

	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_set);
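
/*
 * Example (illustrative sketch added for clarity; MY_TAG, the lock and the
 * tree name are hypothetical, similar in spirit to the page cache's
 * PAGECACHE_TAG_* tags): tagging an existing item under the tree lock.
 * Since tagging a not-present item is a bug, the item is looked up first;
 * MY_TAG must be < RADIX_TREE_MAX_TAGS.
 *
 *	spin_lock(&my_lock);
 *	if (radix_tree_lookup(&my_tree, index))
 *		radix_tree_tag_set(&my_tree, index, MY_TAG);
 *	spin_unlock(&my_lock);
 */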

/**
 * radix_tree_tag_clear - clear a tag on a radix tree node
 * @root: radix tree root
 * @index: index key
 * @tag: tag index
 *
 * Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 * corresponding to @index in the radix tree.  If
 * this causes the leaf node to have no tags set then clear the tag in the
 * next-to-leaf node, etc.
 *
 * Returns the address of the tagged item on success, else NULL.  ie:
 * has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_path path[RADIX_TREE_MAX_PATH], *pathp = path;
	struct radix_tree_node *slot = NULL;
	unsigned int height, shift;

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	pathp->node = NULL;
	slot = root->rnode;

	while (height > 0) {
		int offset;

		if (slot == NULL)
			goto out;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		pathp[1].offset = offset;
		pathp[1].node = slot;
		slot = slot->slots[offset];
		pathp++;
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	if (slot == NULL)
		goto out;

	while (pathp->node) {
		if (!tag_get(pathp->node, tag, pathp->offset))
			goto out;
		tag_clear(pathp->node, tag, pathp->offset);
		if (any_tag_set(pathp->node, tag))
			goto out;
		pathp--;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_clear);

#ifndef __KERNEL__	/* Only the test harness uses this at present */
/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root: radix tree root
 * @index: index key
 * @tag: tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *node;
	int saw_unset_tag = 0;

	/* check the root's tag bit */
	if (!root_tag_get(root, tag))
		return 0;

	node = rcu_dereference(root->rnode);
	if (node == NULL)
		return 0;

	if (radix_tree_is_direct_ptr(node))
		return (index == 0);

	height = node->height;
	if (index > radix_tree_maxindex(height))
		return 0;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; ; ) {
		int offset;

		if (node == NULL)
			return 0;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;

		/*
		 * This is just a debug check.  Later, we can bail as soon as
		 * we see an unset tag.
		 */
		if (!tag_get(node, tag, offset))
			saw_unset_tag = 1;
		if (height == 1) {
			int ret = tag_get(node, tag, offset);

			BUG_ON(ret && saw_unset_tag);
			return !!ret;
		}
		node = rcu_dereference(node->slots[offset]);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}
}
EXPORT_SYMBOL(radix_tree_tag_get);
#endif

static unsigned int
__lookup(struct radix_tree_node *slot, void **results, unsigned long index,
	unsigned int max_items, unsigned long *next_index)
{
	unsigned int nr_found = 0;
	unsigned int shift, height;
	unsigned long i;

	height = slot->height;
	if (height == 0)
		goto out;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	for ( ; height > 1; height--) {
		i = (index >> shift) & RADIX_TREE_MAP_MASK;
		for (;;) {
			if (slot->slots[i] != NULL)
				break;
			index &= ~((1UL << shift) - 1);
			index += 1UL << shift;
			if (index == 0)
				goto out;	/* 32-bit wraparound */
			i++;
			if (i == RADIX_TREE_MAP_SIZE)
				goto out;
		}

		shift -= RADIX_TREE_MAP_SHIFT;
		slot = rcu_dereference(slot->slots[i]);
		if (slot == NULL)
			goto out;
	}

	/* Bottom level: grab some items */
	for (i = index & RADIX_TREE_MAP_MASK; i < RADIX_TREE_MAP_SIZE; i++) {
		struct radix_tree_node *node;
		index++;
		node = slot->slots[i];
		if (node) {
			results[nr_found++] = rcu_dereference(node);
			if (nr_found == max_items)
				goto out;
		}
	}
out:
	*next_index = index;
	return nr_found;
}

/**
 * radix_tree_gang_lookup - perform multiple lookup on a radix tree
 * @root: radix tree root
 * @results: where the results of the lookup are placed
 * @first_index: start the lookup from this key
 * @max_items: place up to this many items at *results
 *
 * Performs an index-ascending scan of the tree for present items.  Places
 * them at *@results and returns the number of items which were placed at
 * *@results.
 *
 * The implementation is naive.
 *
 * Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 * rcu_read_lock. In this case, rather than the returned results being
 * an atomic snapshot of the tree at a single point in time, the semantics
 * of an RCU protected gang lookup are as though multiple radix_tree_lookups
 * have been issued under individual locks, with the results stored in
 * 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	unsigned long max_index;
	struct radix_tree_node *node;
	unsigned long cur_index = first_index;
	unsigned int ret;

	node = rcu_dereference(root->rnode);
	if (!node)
		return 0;

	if (radix_tree_is_direct_ptr(node)) {
		if (first_index > 0)
			return 0;
		node = radix_tree_direct_to_ptr(node);
		results[0] = rcu_dereference(node);
		return 1;
	}

	max_index = radix_tree_maxindex(node->height);

	ret = 0;
	while (ret < max_items) {
		unsigned int nr_found;
		unsigned long next_index;	/* Index of next search */

		if (cur_index > max_index)
			break;
		nr_found = __lookup(node, results + ret, cur_index,
					max_items - ret, &next_index);
		ret += nr_found;
		if (next_index == 0)
			break;
		cur_index = next_index;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
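
/*
 * Example (illustrative sketch added for clarity): walking every item in
 * the tree in batches of up to 16, in ascending index order.  The item
 * type, its index field and process() are hypothetical; the caller has to
 * be able to derive the next start index from the items it receives.
 *
 *	void *batch[16];
 *	unsigned long start = 0;
 *	unsigned int i, nr;
 *
 *	do {
 *		nr = radix_tree_gang_lookup(&my_tree, batch, start, 16);
 *		for (i = 0; i < nr; i++) {
 *			struct my_item *item = batch[i];
 *
 *			start = item->index + 1;
 *			process(item);
 *		}
 *	} while (nr == 16);
 */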

/*
 * FIXME: the two tag_get()s here should use find_next_bit() instead of
 * open-coding the search.
 */
static unsigned int
__lookup_tag(struct radix_tree_node *slot, void **results, unsigned long index,
	unsigned int max_items, unsigned long *next_index, unsigned int tag)
{
	unsigned int nr_found = 0;
	unsigned int shift, height;

	height = slot->height;
	if (height == 0)
		goto out;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	while (height > 0) {
		unsigned long i = (index >> shift) & RADIX_TREE_MAP_MASK;

		for (;;) {
			if (tag_get(slot, tag, i))
				break;
			index &= ~((1UL << shift) - 1);
			index += 1UL << shift;
			if (index == 0)
				goto out;	/* 32-bit wraparound */
			i++;
			if (i == RADIX_TREE_MAP_SIZE)
				goto out;
		}
		height--;
		if (height == 0) {	/* Bottom level: grab some items */
			unsigned long j = index & RADIX_TREE_MAP_MASK;

			for ( ; j < RADIX_TREE_MAP_SIZE; j++) {
				struct radix_tree_node *node;
				index++;
				if (!tag_get(slot, tag, j))
					continue;
				node = slot->slots[j];
				/*
				 * Even though the tag was found set, we need to
				 * recheck that we have a non-NULL node, because
				 * if this lookup is lockless, it may have been
				 * subsequently deleted.
				 *
				 * Similar care must be taken anywhere we look
				 * up ->slots[x] without a lock (ie. we can't
				 * rely on its value remaining the same).
				 */
				if (node) {
					node = rcu_dereference(node);
					results[nr_found++] = node;
					if (nr_found == max_items)
						goto out;
				}
			}
		}
		shift -= RADIX_TREE_MAP_SHIFT;
		slot = rcu_dereference(slot->slots[i]);
		if (slot == NULL)
			break;
	}
out:
	*next_index = index;
	return nr_found;
}

/**
 * radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *                              based on a tag
 * @root: radix tree root
 * @results: where the results of the lookup are placed
 * @first_index: start the lookup from this key
 * @max_items: place up to this many items at *results
 * @tag: the tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Performs an index-ascending scan of the tree for present items which
 * have the tag indexed by @tag set.  Places the items at *@results and
 * returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_node *node;
	unsigned long max_index;
	unsigned long cur_index = first_index;
	unsigned int ret;

	/* check the root's tag bit */
	if (!root_tag_get(root, tag))
		return 0;

	node = rcu_dereference(root->rnode);
	if (!node)
		return 0;

	if (radix_tree_is_direct_ptr(node)) {
		if (first_index > 0)
			return 0;
		node = radix_tree_direct_to_ptr(node);
		results[0] = rcu_dereference(node);
		return 1;
	}

	max_index = radix_tree_maxindex(node->height);

	ret = 0;
	while (ret < max_items) {
		unsigned int nr_found;
		unsigned long next_index;	/* Index of next search */

		if (cur_index > max_index)
			break;
		nr_found = __lookup_tag(node, results + ret, cur_index,
					max_items - ret, &next_index, tag);
		ret += nr_found;
		if (next_index == 0)
			break;
		cur_index = next_index;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);

/**
 * radix_tree_shrink - shrink height of a radix tree to minimal
 * @root: radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root)
{
	/* try to shrink tree height */
	while (root->height > 0 &&
			root->rnode->count == 1 &&
			root->rnode->slots[0]) {
		struct radix_tree_node *to_free = root->rnode;
		void *newptr;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another. If
		 * it was safe to dereference the old pointer to it
		 * (to_free->slots[0]), it will be safe to dereference the new
		 * one (root->rnode).
		 */
		newptr = to_free->slots[0];
		if (root->height == 1)
			newptr = radix_tree_ptr_to_direct(newptr);
		root->rnode = newptr;
		root->height--;
		/* must only free zeroed nodes into the slab */
		tag_clear(to_free, 0, 0);
		tag_clear(to_free, 1, 0);
		to_free->slots[0] = NULL;
		to_free->count = 0;
		radix_tree_node_free(to_free);
	}
}

/**
 * radix_tree_delete - delete an item from a radix tree
 * @root: radix tree root
 * @index: index key
 *
 * Remove the item at @index from the radix tree rooted at @root.
 *
 * Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	struct radix_tree_path path[RADIX_TREE_MAX_PATH], *pathp = path;
	struct radix_tree_node *slot = NULL;
	struct radix_tree_node *to_free;
	unsigned int height, shift;
	int tag;
	int offset;

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	slot = root->rnode;
	if (height == 0 && root->rnode) {
		slot = radix_tree_direct_to_ptr(slot);
		root_tag_clear_all(root);
		root->rnode = NULL;
		goto out;
	}

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	pathp->node = NULL;

	do {
		if (slot == NULL)
			goto out;

		pathp++;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		pathp->offset = offset;
		pathp->node = slot;
		slot = slot->slots[offset];
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	if (slot == NULL)
		goto out;

	/*
	 * Clear all tags associated with the just-deleted item
	 */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
		if (tag_get(pathp->node, tag, pathp->offset))
			radix_tree_tag_clear(root, index, tag);
	}

	to_free = NULL;
	/* Now free the nodes we do not need anymore */
	while (pathp->node) {
		pathp->node->slots[pathp->offset] = NULL;
		pathp->node->count--;
		/*
		 * Queue the node for deferred freeing after the
		 * last reference to it disappears (set NULL, above).
		 */
		if (to_free)
			radix_tree_node_free(to_free);

		if (pathp->node->count) {
			if (pathp->node == root->rnode)
				radix_tree_shrink(root);
			goto out;
		}

		/* Node with zero slots in use so free it */
		to_free = pathp->node;
		pathp--;
	}
	root_tag_clear_all(root);
	root->height = 0;
	root->rnode = NULL;
	if (to_free)
		radix_tree_node_free(to_free);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_delete);
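
/*
 * Example (illustrative sketch added for clarity; the lock, tree, item
 * type and my_item_free_rcu() callback are hypothetical): removal under
 * the tree lock, with the deleted item freed via RCU so that concurrent
 * lockless lookups (see radix_tree_lookup above) remain safe.
 *
 *	spin_lock(&my_lock);
 *	item = radix_tree_delete(&my_tree, index);
 *	spin_unlock(&my_lock);
 *	if (item)
 *		call_rcu(&item->rcu_head, my_item_free_rcu);
 */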

/**
 * radix_tree_tagged - test whether any items in the tree are tagged
 * @root: radix tree root
 * @tag: tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);

static void
radix_tree_node_ctor(void *node, struct kmem_cache *cachep, unsigned long flags)
{
	memset(node, 0, sizeof(struct radix_tree_node));
}

static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int tmp = height * RADIX_TREE_MAP_SHIFT;
	unsigned long index = (~0UL >> (RADIX_TREE_INDEX_BITS - tmp - 1)) >> 1;

	if (tmp >= RADIX_TREE_INDEX_BITS)
		index = ~0UL;
	return index;
}

static __init void radix_tree_init_maxindex(void)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
}

static int radix_tree_callback(struct notifier_block *nfb,
				unsigned long action,
				void *hcpu)
{
	int cpu = (long)hcpu;
	struct radix_tree_preload *rtp;

	/* Free per-cpu pool of preloaded nodes */
	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
		rtp = &per_cpu(radix_tree_preloads, cpu);
		while (rtp->nr) {
			kmem_cache_free(radix_tree_node_cachep,
					rtp->nodes[rtp->nr-1]);
			rtp->nodes[rtp->nr-1] = NULL;
			rtp->nr--;
		}
	}
	return NOTIFY_OK;
}

void __init radix_tree_init(void)
{
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC, radix_tree_node_ctor);
	radix_tree_init_maxindex();
	hotcpu_notifier(radix_tree_callback, 0);
}