/*
 * C macro implementation of treaps.
 *
 * Licensed under a two-clause BSD-style license.
 * See LICENSE for details.
 */
#define MAYBE_UNUSED __attribute__((__unused__))
/* Pointer/Offset conversion. */
#define trpn_pointer(a_base, a_offset) (a_base##_pointer(a_offset))
#define trpn_offset(a_base, a_pointer) (a_base##_offset(a_pointer))
#define trpn_modify(a_base, a_offset) \
	do { \
		if ((a_offset) < a_base##_pool.committed) { \
			uint32_t old_offset = (a_offset); \
			(a_offset) = a_base##_alloc(1); \
			*trpn_pointer(a_base, a_offset) = \
				*trpn_pointer(a_base, old_offset); \
		} \
	} while (0)
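
/*
 * The macros above assume that each a_base names an offset-addressed
 * object pool supplied by the caller; the only interface relied upon is
 * what the token pasting expands to.  A minimal sketch of that assumed
 * contract, using the hypothetical pool name "foo":
 *
 *	struct foo *foo_pointer(uint32_t offset);
 *	uint32_t foo_offset(struct foo *pointer);
 *	uint32_t foo_alloc(uint32_t count);	returns the new node's offset
 *	foo_pool.committed
 *
 * foo_pointer() is expected to map invalid offsets such as ~0 to NULL,
 * since the generated lookup functions below return its result directly.
 * trpn_modify() copies any node whose offset lies below
 * foo_pool.committed to a fresh allocation before changing it, so
 * committed nodes are never modified in place.
 */
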
/* Left accessors. */
#define trp_left_get(a_base, a_field, a_node) \
	(trpn_pointer(a_base, a_node)->a_field.trpn_left)
#define trp_left_set(a_base, a_field, a_node, a_left) \
	do { \
		trpn_modify(a_base, a_node); \
		trp_left_get(a_base, a_field, a_node) = (a_left); \
	} while (0)

/* Right accessors. */
#define trp_right_get(a_base, a_field, a_node) \
	(trpn_pointer(a_base, a_node)->a_field.trpn_right)
#define trp_right_set(a_base, a_field, a_node, a_right) \
	do { \
		trpn_modify(a_base, a_node); \
		trp_right_get(a_base, a_field, a_node) = (a_right); \
	} while (0)
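
/*
 * Note that the _set accessors go through trpn_modify(), which may copy
 * the node to a new offset and update the a_node argument in place; the
 * argument must therefore be a modifiable lvalue holding the node's
 * offset, as it is in the generated functions below.
 */
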
/*
 * Fibonacci hash function.
 * The multiplier is the nearest prime to 2^32 * (√5 - 1)/2.
 * See Knuth §6.4: volume 3, 3rd ed, p518.
 */
#define trpn_hash(a_node) (uint32_t) (2654435761u * (a_node))

/* Priority accessors. */
#define trp_prio_get(a_node) trpn_hash(a_node)
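
/*
 * A node's priority is simply the hash of its pool offset, so no per-node
 * priority has to be stored and the treap shape is a deterministic
 * function of which offsets are in the tree.  As used by the rotations in
 * the insert/remove code below, a smaller hash value means a higher
 * priority.
 */
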
/* Node initializer. */
#define trp_node_new(a_base, a_field, a_node) \
	do { \
		trp_left_set(a_base, a_field, (a_node), ~0); \
		trp_right_set(a_base, a_field, (a_node), ~0); \
	} while (0)
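
/*
 * Throughout this file ~0 (i.e. UINT32_MAX) is the "null" offset: a fresh
 * node starts out with no children, and tests such as
 * ~trp_left_get(...) read as "the left child exists".
 */
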
/* Internal utility macros. */
#define trpn_first(a_base, a_field, a_root, r_node) \
	do { \
		(r_node) = (a_root); \
		if (!~(r_node)) \
			break; \
		while (~trp_left_get(a_base, a_field, (r_node))) \
			(r_node) = trp_left_get(a_base, a_field, (r_node)); \
	} while (0)

/* Left rotation: a_node's right child becomes the subtree root, returned in r_node. */
#define trpn_rotate_left(a_base, a_field, a_node, r_node) \
	do { \
		(r_node) = trp_right_get(a_base, a_field, (a_node)); \
		trp_right_set(a_base, a_field, (a_node), \
			trp_left_get(a_base, a_field, (r_node))); \
		trp_left_set(a_base, a_field, (r_node), (a_node)); \
	} while (0)

/* Right rotation: a_node's left child becomes the subtree root, returned in r_node. */
#define trpn_rotate_right(a_base, a_field, a_node, r_node) \
	do { \
		(r_node) = trp_left_get(a_base, a_field, (a_node)); \
		trp_left_set(a_base, a_field, (a_node), \
			trp_right_get(a_base, a_field, (r_node))); \
		trp_right_set(a_base, a_field, (r_node), (a_node)); \
	} while (0)

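/*
 * The trp_gen() macro below generates a treap implementation for the
 * given types.  As read from the generated code: a_attr is the function
 * attribute (e.g. static), a_pre the prefix of the generated function
 * names, a_type the node type, a_field the name of the link member inside
 * a_type (the one holding the trpn_left/trpn_right offsets), a_base the
 * prefix of the object pool accessors, and a_cmp a comparison function
 * taking two a_type pointers and returning <0, 0 or >0.  The generated
 * functions are a_pre##first/next/search/nsearch/insert/remove.
 */
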
#define trp_gen(a_attr, a_pre, a_type, a_field, a_base, a_cmp) \
a_attr a_type MAYBE_UNUSED *a_pre##first(struct trp_root *treap) \
{ \
	uint32_t ret; \
	trpn_first(a_base, a_field, treap->trp_root, ret); \
	return trpn_pointer(a_base, ret); \
} \
a_attr a_type MAYBE_UNUSED *a_pre##next(struct trp_root *treap, a_type *node) \
{ \
	uint32_t ret; \
	uint32_t offset = trpn_offset(a_base, node); \
	if (~trp_right_get(a_base, a_field, offset)) { \
		trpn_first(a_base, a_field, \
			trp_right_get(a_base, a_field, offset), ret); \
	} else { \
		uint32_t tnode = treap->trp_root; \
		ret = ~0; \
		while (1) { \
			int cmp = (a_cmp)(trpn_pointer(a_base, offset), \
				trpn_pointer(a_base, tnode)); \
			if (cmp < 0) { \
				ret = tnode; \
				tnode = trp_left_get(a_base, a_field, tnode); \
			} else if (cmp > 0) { \
				tnode = trp_right_get(a_base, a_field, tnode); \
			} else { \
				break; \
			} \
		} \
	} \
	return trpn_pointer(a_base, ret); \
} \
a_attr a_type MAYBE_UNUSED *a_pre##search(struct trp_root *treap, a_type *key) \
{ \
	int cmp; \
	uint32_t ret = treap->trp_root; \
	while (~ret && (cmp = (a_cmp)(key, trpn_pointer(a_base, ret)))) { \
		if (cmp < 0) { \
			ret = trp_left_get(a_base, a_field, ret); \
		} else { \
			ret = trp_right_get(a_base, a_field, ret); \
		} \
	} \
	return trpn_pointer(a_base, ret); \
} \
a_attr a_type MAYBE_UNUSED *a_pre##nsearch(struct trp_root *treap, a_type *key) \
{ \
	int cmp; \
	uint32_t ret = treap->trp_root; \
	while (~ret && (cmp = (a_cmp)(key, trpn_pointer(a_base, ret)))) { \
		if (cmp < 0) { \
			if (!~trp_left_get(a_base, a_field, ret)) \
				break; \
			ret = trp_left_get(a_base, a_field, ret); \
		} else { \
			ret = trp_right_get(a_base, a_field, ret); \
		} \
	} \
	return trpn_pointer(a_base, ret); \
} \
a_attr uint32_t MAYBE_UNUSED a_pre##insert_recurse(uint32_t cur_node, uint32_t ins_node) \
{ \
	if (cur_node == ~0) { \
		return ins_node; \
	} else { \
		uint32_t ret; \
		int cmp = (a_cmp)(trpn_pointer(a_base, ins_node), \
			trpn_pointer(a_base, cur_node)); \
		if (cmp < 0) { \
			uint32_t left = a_pre##insert_recurse( \
				trp_left_get(a_base, a_field, cur_node), ins_node); \
			trp_left_set(a_base, a_field, cur_node, left); \
			if (trp_prio_get(left) < trp_prio_get(cur_node)) \
				trpn_rotate_right(a_base, a_field, cur_node, ret); \
			else \
				ret = cur_node; \
		} else { \
			uint32_t right = a_pre##insert_recurse( \
				trp_right_get(a_base, a_field, cur_node), ins_node); \
			trp_right_set(a_base, a_field, cur_node, right); \
			if (trp_prio_get(right) < trp_prio_get(cur_node)) \
				trpn_rotate_left(a_base, a_field, cur_node, ret); \
			else \
				ret = cur_node; \
		} \
		return ret; \
	} \
} \
a_attr void MAYBE_UNUSED a_pre##insert(struct trp_root *treap, a_type *node) \
{ \
	uint32_t offset = trpn_offset(a_base, node); \
	trp_node_new(a_base, a_field, offset); \
	treap->trp_root = a_pre##insert_recurse(treap->trp_root, offset); \
} \
a_attr uint32_t MAYBE_UNUSED a_pre##remove_recurse(uint32_t cur_node, uint32_t rem_node) \
{ \
	int cmp = a_cmp(trpn_pointer(a_base, rem_node), \
		trpn_pointer(a_base, cur_node)); \
	if (cmp == 0) { \
		uint32_t ret; \
		uint32_t left = trp_left_get(a_base, a_field, cur_node); \
		uint32_t right = trp_right_get(a_base, a_field, cur_node); \
		if (left == ~0) { \
			if (right == ~0) \
				return ~0; \
		} else if (right == ~0 || trp_prio_get(left) < trp_prio_get(right)) { \
			trpn_rotate_right(a_base, a_field, cur_node, ret); \
			right = a_pre##remove_recurse(cur_node, rem_node); \
			trp_right_set(a_base, a_field, ret, right); \
			return ret; \
		} \
		trpn_rotate_left(a_base, a_field, cur_node, ret); \
		left = a_pre##remove_recurse(cur_node, rem_node); \
		trp_left_set(a_base, a_field, ret, left); \
		return ret; \
	} else if (cmp < 0) { \
		uint32_t left = a_pre##remove_recurse( \
			trp_left_get(a_base, a_field, cur_node), rem_node); \
		trp_left_set(a_base, a_field, cur_node, left); \
		return cur_node; \
	} else { \
		uint32_t right = a_pre##remove_recurse( \
			trp_right_get(a_base, a_field, cur_node), rem_node); \
		trp_right_set(a_base, a_field, cur_node, right); \
		return cur_node; \
	} \
} \
a_attr void MAYBE_UNUSED a_pre##remove(struct trp_root *treap, a_type *node) \
{ \
	treap->trp_root = a_pre##remove_recurse(treap->trp_root, \
		trpn_offset(a_base, node)); \
}
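
/*
 * Usage sketch.  This is illustrative only: every name below is
 * hypothetical, and an object pool with the interface described near the
 * top of this file is presumed to exist for the "elem" base.
 *
 *	struct elem {
 *		int key;
 *		struct trp_node link;
 *	};
 *
 *	static struct elem *elem_pointer(uint32_t offset);
 *	static uint32_t elem_offset(struct elem *e);
 *	static uint32_t elem_alloc(uint32_t count);
 *	static struct { struct elem *base; uint32_t committed; } elem_pool;
 *
 *	static int elem_cmp(struct elem *a, struct elem *b)
 *	{
 *		return (a->key > b->key) - (a->key < b->key);
 *	}
 *
 *	trp_gen(static, elem_, struct elem, link, elem, elem_cmp)
 *
 * An empty treap is a root offset of ~0; after that the generated
 * functions can be used directly:
 *
 *	struct trp_root root = { ~0 };
 *	struct elem *node = elem_pointer(elem_alloc(1));
 *	node->key = 42;
 *	elem_insert(&root, node);
 *	for (node = elem_first(&root); node; node = elem_next(&root, node))
 *		use(node);
 */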