/*
 * Lightweight Autonomic Network Architecture
 *
 * Original userspace code from D. J. Bernstein. (http://cr.yp.to/critbit.html)
 * Added critbit_get method hack: instead of copying strings into the nodes
 * (original version), we now hold the reference to it and fetch the container
 * structure on lookups. By doing this, we only need to guarantee that the
 * string is power-of-two boundary aligned. Added RCU and kmem_cache aligned
 * node allocation support.
 *
 * Copyright 2011 Daniel Borkmann <dborkma@tik.ee.ethz.ch>,
 * Swiss federal institute of technology (ETH Zurich)
 *
 * Compared to a hash table, a crit-bit tree has comparable speed and two big
 * advantages. The first advantage is that a crit-bit tree supports more fast
 * operations: finding the smallest string, for example. The second advantage
 * is that a crit-bit tree guarantees good performance: it doesn't have any
 * tricky slowdowns for unusual (or malicious) data.
 *
 * Crit-bit trees are faster than comparison-based structures such as AVL trees
 * and B-trees. They're also simpler, especially for variable-length strings.
 *
 * Crit-bit trees have the disadvantage of not (yet!) being widely appreciated.
 * Very few textbooks explain them, and very few libraries implement them.
 * (D. J. Bernstein, http://cr.yp.to/critbit.html)
 */
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/errno.h>
#include <linux/spinlock.h>
#include <linux/slab.h>
#include <linux/cache.h>
#include <linux/rcupdate.h>
#include <linux/atomic.h>

#include "xt_critbit.h"
42 typedef long intptr_t;
49 } ____cacheline_aligned
;
51 struct critbit_node_cache
{
52 struct kmem_cache
*cache
;
56 static struct critbit_node_cache critbit_cache
= { 0 };
58 static inline struct critbit_node
*critbit_alloc_node_aligned(gfp_t flags
)
60 __module_get(THIS_MODULE
);
61 return kmem_cache_alloc(critbit_cache
.cache
, flags
);
64 static inline void critbit_free_node(struct critbit_node
*p
)
66 kmem_cache_free(critbit_cache
.cache
, p
);
67 module_put(THIS_MODULE
);
70 int __critbit_contains(struct critbit_tree
*tree
, const char *elem
)
72 const u8
*ubytes
= (void *) elem
;
73 const size_t ulen
= strlen(elem
);
75 struct critbit_node
*q
;
78 if (unlikely(!rcu_read_lock_held())) {
79 printk(KERN_ERR
"No rcu_read_lock held!\n");
82 p
= rcu_dereference_raw(tree
->root
);
85 while (1 & (intptr_t) p
) {
90 direction
= (1 + (q
->otherbits
| c
)) >> 8;
91 p
= rcu_dereference_raw(q
->child
[direction
]);
94 return (0 == strcmp(elem
, (char *) p
));
96 EXPORT_SYMBOL(__critbit_contains
);
/*
 * critbit_contains - membership test, taking the RCU read lock itself
 * @tree: tree to search
 * @elem: NUL-terminated string to look up
 *
 * Convenience wrapper around __critbit_contains() for callers that are
 * not already in an RCU read-side critical section.
 */
int critbit_contains(struct critbit_tree *tree, const char *elem)
{
	int ret;

	rcu_read_lock();
	ret = __critbit_contains(tree, elem);
	rcu_read_unlock();

	return ret;
}
EXPORT_SYMBOL(critbit_contains);
108 char *__critbit_get(struct critbit_tree
*tree
, const char *elem
)
110 const u8
*ubytes
= (void *) elem
;
111 const size_t ulen
= strlen(elem
);
113 struct critbit_node
*q
;
116 if (unlikely(!rcu_read_lock_held())) {
117 printk(KERN_ERR
"No rcu_read_lock held!\n");
120 p
= rcu_dereference_raw(tree
->root
);
123 while (1 & (intptr_t) p
) {
125 q
= (void *) (p
- 1);
128 direction
= (1 + (q
->otherbits
| c
)) >> 8;
129 p
= rcu_dereference_raw(q
->child
[direction
]);
132 return (0 == strcmp(elem
, (char *) p
)) ? (char *) p
: NULL
;
134 EXPORT_SYMBOL(__critbit_get
);
/*
 * critbit_get - lookup wrapper taking the RCU read lock itself
 * @tree: tree to search
 * @elem: NUL-terminated string to look up
 *
 * NOTE(review): the returned pointer is handed out after rcu_read_unlock(),
 * so the caller must guarantee the element's lifetime by other means
 * (e.g. a refcount on the embedding object) — confirm callers do this.
 */
char *critbit_get(struct critbit_tree *tree, const char *elem)
{
	char *ret;

	rcu_read_lock();
	ret = __critbit_get(tree, elem);
	rcu_read_unlock();

	return ret;
}
EXPORT_SYMBOL(critbit_get);
146 int __critbit_insert(struct critbit_tree
*tree
, char *elem
)
148 const u8
*const ubytes
= (void *) elem
;
149 const size_t ulen
= strlen(elem
);
150 u8 c
, *p
= rcu_dereference_raw(tree
->root
);
151 u32 newbyte
, newotherbits
;
152 struct critbit_node
*q
, *newnode
;
153 int direction
, newdirection
;
156 if (unlikely(!IS_ALIGNED((unsigned long) elem
, SMP_CACHE_BYTES
))) {
157 printk(KERN_ERR
"[lana] Your string is not power "
158 "of two aligned!\n");
162 rcu_assign_pointer(tree
->root
, elem
);
166 while (1 & (intptr_t) p
) {
168 q
= (void *) (p
- 1);
171 direction
= (1 + (q
->otherbits
| c
)) >> 8;
172 p
= rcu_dereference_raw(q
->child
[direction
]);
175 for (newbyte
= 0; newbyte
< ulen
; ++newbyte
) {
176 if (p
[newbyte
] != ubytes
[newbyte
]) {
177 newotherbits
= p
[newbyte
] ^ ubytes
[newbyte
];
178 goto different_byte_found
;
182 if (p
[newbyte
] != 0) {
183 newotherbits
= p
[newbyte
];
184 goto different_byte_found
;
189 different_byte_found
:
190 while (newotherbits
& (newotherbits
- 1))
191 newotherbits
&= newotherbits
- 1;
194 newdirection
= (1 + (newotherbits
| c
)) >> 8;
195 newnode
= critbit_alloc_node_aligned(GFP_ATOMIC
);
198 newnode
->byte
= newbyte
;
199 newnode
->otherbits
= newotherbits
;
200 newnode
->child
[1 - newdirection
] = elem
;
202 for (wherep
= &tree
->root
;;) {
204 if (!(1 & (intptr_t) p
))
206 q
= (void *) (p
- 1);
207 if (q
->byte
> newbyte
)
209 if (q
->byte
== newbyte
&& q
->otherbits
> newotherbits
)
214 direction
= (1 + (q
->otherbits
| c
)) >> 8;
215 wherep
= q
->child
+ direction
;
218 newnode
->child
[newdirection
] = *wherep
;
219 rcu_assign_pointer(*wherep
, (void *) (1 + (char *) newnode
));
222 EXPORT_SYMBOL(__critbit_insert
);
224 int critbit_insert(struct critbit_tree
*tree
, char *elem
)
228 spin_lock_irqsave(&tree
->wr_lock
, flags
);
229 ret
= __critbit_insert(tree
, elem
);
230 spin_unlock_irqrestore(&tree
->wr_lock
, flags
);
233 EXPORT_SYMBOL(critbit_insert
);
235 static void critbit_do_free_rcu(struct rcu_head
*rp
)
237 struct critbit_node
*p
= container_of(rp
, struct critbit_node
, rcu
);
238 critbit_free_node(p
);
241 int __critbit_delete(struct critbit_tree
*tree
, const char *elem
)
243 const u8
*ubytes
= (void *) elem
;
244 const size_t ulen
= strlen(elem
);
245 u8 c
, *p
= rcu_dereference_raw(tree
->root
);
246 void **wherep
= &tree
->root
;
247 void **whereq
= NULL
;
248 struct critbit_node
*q
= NULL
;
253 while (1 & (intptr_t) p
) {
255 q
= (void *) (p
- 1);
259 direction
= (1 + (q
->otherbits
| c
)) >> 8;
260 wherep
= q
->child
+ direction
;
264 if (0 != strcmp(elem
, (char *) p
))
266 /* Here, we could decrement a refcount to the elem. */
272 rcu_assign_pointer(*whereq
, q
->child
[1 - direction
]);
273 call_rcu(&q
->rcu
, critbit_do_free_rcu
);
276 EXPORT_SYMBOL(__critbit_delete
);
278 int critbit_delete(struct critbit_tree
*tree
, const char *elem
)
282 spin_lock_irqsave(&tree
->wr_lock
, flags
);
283 ret
= __critbit_delete(tree
, elem
);
284 spin_unlock_irqrestore(&tree
->wr_lock
, flags
);
287 EXPORT_SYMBOL(critbit_delete
);
289 static void critbit_ctor(void *obj
)
291 struct critbit_node
*node
= obj
;
292 node
->child
[0] = node
->child
[1] = NULL
;
295 static int critbit_node_cache_init(void)
297 atomic_set(&critbit_cache
.refcnt
, 1);
298 critbit_cache
.cache
= kmem_cache_create("critbit",
299 sizeof(struct critbit_node
),
300 0, SLAB_HWCACHE_ALIGN
,
302 if (!critbit_cache
.cache
)
307 static void critbit_node_cache_destroy(void)
310 kmem_cache_destroy(critbit_cache
.cache
);
313 void get_critbit_cache(void)
315 if (unlikely(!atomic_read(&critbit_cache
.refcnt
))) {
316 if (critbit_node_cache_init())
317 panic("No mem left for critbit cache!\n");
319 atomic_inc(&critbit_cache
.refcnt
);
321 EXPORT_SYMBOL(get_critbit_cache
);
323 void put_critbit_cache(void)
325 if (likely(!atomic_dec_and_test(&critbit_cache
.refcnt
)))
327 critbit_node_cache_destroy();
329 EXPORT_SYMBOL(put_critbit_cache
);