Tue Jan 28 16:33:34 2020 UTC
Add a radix_tree_await_memory(), for kernel use.


(ad)
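The new function pairs with radix_tree_insert_node, whose only failure mode is ENOMEM. A minimal, hypothetical sketch of the intended kernel usage (the example_insert_waiting name and the lock, idx and item arguments below are illustrative, not part of this commit): when an insert fails for lack of intermediate nodes, the caller can drop its locks, wait for node memory, and retry.

static int
example_insert_waiting(struct radix_tree *t, kmutex_t *lock, uint64_t idx,
    void *item)
{
	int error;

	for (;;) {
		error = radix_tree_insert_node(t, idx, item);
		if (error != ENOMEM)
			return error;	/* 0 on success */
		/* drop the lock so memory can be reclaimed, then wait */
		mutex_exit(lock);
		radix_tree_await_memory();
		mutex_enter(lock);
	}
}

Because the lock is released across the wait, a real caller would normally have to revalidate its state before retrying the insert.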
diff -r1.21 -r1.22 src/common/lib/libc/gen/radixtree.c
diff -r1.6 -r1.7 src/sys/sys/radixtree.h

cvs diff -r1.21 -r1.22 src/common/lib/libc/gen/radixtree.c

--- src/common/lib/libc/gen/radixtree.c 2020/01/12 20:00:41 1.21
+++ src/common/lib/libc/gen/radixtree.c 2020/01/28 16:33:34 1.22
@@ -1,1350 +1,1367 @@
1/* $NetBSD: radixtree.c,v 1.21 2020/01/12 20:00:41 para Exp $ */ 1/* $NetBSD: radixtree.c,v 1.22 2020/01/28 16:33:34 ad Exp $ */
2 2
3/*- 3/*-
4 * Copyright (c)2011,2012,2013 YAMAMOTO Takashi, 4 * Copyright (c)2011,2012,2013 YAMAMOTO Takashi,
5 * All rights reserved. 5 * All rights reserved.
6 * 6 *
7 * Redistribution and use in source and binary forms, with or without 7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions 8 * modification, are permitted provided that the following conditions
9 * are met: 9 * are met:
10 * 1. Redistributions of source code must retain the above copyright 10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer. 11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright 12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the 13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution. 14 * documentation and/or other materials provided with the distribution.
15 * 15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND 16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
17 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 17 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 18 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE 19 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
20 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 20 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 21 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
22 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 22 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
23 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 23 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
24 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 24 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 25 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 * SUCH DAMAGE. 26 * SUCH DAMAGE.
27 */ 27 */
28 28
29/* 29/*
30 * radixtree.c 30 * radixtree.c
31 * 31 *
32 * Overview: 32 * Overview:
33 * 33 *
34 * This is an implementation of a radix tree, whose keys are uint64_t and 34 * This is an implementation of a radix tree, whose keys are uint64_t and
35 * whose leaves are user-provided pointers. 35 * whose leaves are user-provided pointers.
36 * 36 *
37 * Leaf nodes are just void * and this implementation doesn't care about 37 * Leaf nodes are just void * and this implementation doesn't care about
38 * what they actually point to. However, this implementation has an assumption 38 * what they actually point to. However, this implementation has an assumption
39 * about their alignment. Specifically, this implementation assumes that their 39 * about their alignment. Specifically, this implementation assumes that their
40 * 2 LSBs are always zero and uses them for internal accounting. 40 * 2 LSBs are always zero and uses them for internal accounting.
41 * 41 *
42 * Intermediate nodes and memory allocation: 42 * Intermediate nodes and memory allocation:
43 * 43 *
44 * Intermediate nodes are automatically allocated and freed internally and 44 * Intermediate nodes are automatically allocated and freed internally and
45 * basically users don't need to care about them. The allocation is done via 45 * basically users don't need to care about them. The allocation is done via
46 * pool_cache_get(9) for _KERNEL, malloc(3) for userland, and alloc() for 46 * pool_cache_get(9) for _KERNEL, malloc(3) for userland, and alloc() for
47 * the _STANDALONE environment. Only the radix_tree_insert_node function can 47 * the _STANDALONE environment. Only the radix_tree_insert_node function can
48 * allocate memory for intermediate nodes and thus can fail with ENOMEM. 48 * allocate memory for intermediate nodes and thus can fail with ENOMEM.
49 * 49 *
50 * Memory Efficiency: 50 * Memory Efficiency:
51 * 51 *
52 * It's designed to work efficiently with dense index distribution. 52 * It's designed to work efficiently with dense index distribution.
53 * The memory consumption (number of necessary intermediate nodes) heavily 53 * The memory consumption (number of necessary intermediate nodes) heavily
54 * depends on the index distribution. Basically, a denser index distribution 54 * depends on the index distribution. Basically, a denser index distribution
55 * consumes fewer nodes per item. Approximately, 55 * consumes fewer nodes per item. Approximately,
56 * 56 *
57 * - the best case: about RADIX_TREE_PTR_PER_NODE items per intermediate node. 57 * - the best case: about RADIX_TREE_PTR_PER_NODE items per intermediate node.
58 * it would look like the following. 58 * it would look like the following.
59 * 59 *
60 * root (t_height=1) 60 * root (t_height=1)
61 * | 61 * |
62 * v 62 * v
63 * [ | | | ] (intermediate node. RADIX_TREE_PTR_PER_NODE=4 in this fig) 63 * [ | | | ] (intermediate node. RADIX_TREE_PTR_PER_NODE=4 in this fig)
64 * | | | | 64 * | | | |
65 * v v v v 65 * v v v v
66 * p p p p (items) 66 * p p p p (items)
67 * 67 *
68 * - the worst case: RADIX_TREE_MAX_HEIGHT intermediate nodes per item. 68 * - the worst case: RADIX_TREE_MAX_HEIGHT intermediate nodes per item.
69 * it would look like the following if RADIX_TREE_MAX_HEIGHT=3. 69 * it would look like the following if RADIX_TREE_MAX_HEIGHT=3.
70 * 70 *
71 * root (t_height=3) 71 * root (t_height=3)
72 * | 72 * |
73 * v 73 * v
74 * [ | | | ] 74 * [ | | | ]
75 * | 75 * |
76 * v 76 * v
77 * [ | | | ] 77 * [ | | | ]
78 * | 78 * |
79 * v 79 * v
80 * [ | | | ] 80 * [ | | | ]
81 * | 81 * |
82 * v 82 * v
83 * p 83 * p
84 * 84 *
85 * The height of the tree (t_height) is dynamic. It's smaller if only small 85 * The height of the tree (t_height) is dynamic. It's smaller if only small
86 * index values are used. As an extreme case, if only index 0 is used, 86 * index values are used. As an extreme case, if only index 0 is used,
87 * the corresponding value is directly stored in the root of the tree 87 * the corresponding value is directly stored in the root of the tree
88 * (struct radix_tree) without allocating any intermediate nodes. In that 88 * (struct radix_tree) without allocating any intermediate nodes. In that
89 * case, t_height=0. 89 * case, t_height=0.
90 * 90 *
91 * Gang lookup: 91 * Gang lookup:
92 * 92 *
93 * This implementation provides a way to scan many nodes quickly via 93 * This implementation provides a way to scan many nodes quickly via
94 * the radix_tree_gang_lookup_node function and its variants. 94 * the radix_tree_gang_lookup_node function and its variants.
95 * 95 *
96 * Tags: 96 * Tags:
97 * 97 *
98 * This implementation provides tagging functionality, which allows quick 98 * This implementation provides tagging functionality, which allows quick
99 * scanning of a subset of leaf nodes. Leaf nodes are untagged when inserted 99 * scanning of a subset of leaf nodes. Leaf nodes are untagged when inserted
100 * into the tree and can be tagged by the radix_tree_set_tag function. The 100 * into the tree and can be tagged by the radix_tree_set_tag function. The
101 * radix_tree_gang_lookup_tagged_node function and its variants return only 101 * radix_tree_gang_lookup_tagged_node function and its variants return only
102 * leaf nodes with the given tag. To reduce the number of nodes to visit for 102 * leaf nodes with the given tag. To reduce the number of nodes to visit for
103 * these functions, this implementation keeps tagging information in internal 103 * these functions, this implementation keeps tagging information in internal
104 * intermediate nodes and quickly skips uninteresting parts of a tree. 104 * intermediate nodes and quickly skips uninteresting parts of a tree.
105 * 105 *
106 * A tree has RADIX_TREE_TAG_ID_MAX independent tag spaces, each of which is 106 * A tree has RADIX_TREE_TAG_ID_MAX independent tag spaces, each of which is
107 * identified by a zero-origin number, tagid. For the current implementation, 107 * identified by a zero-origin number, tagid. For the current implementation,
108 * RADIX_TREE_TAG_ID_MAX is 2. A set of tags is described as a bitmask tagmask, 108 * RADIX_TREE_TAG_ID_MAX is 2. A set of tags is described as a bitmask tagmask,
109 * which is a bitwise OR of (1 << tagid). 109 * which is a bitwise OR of (1 << tagid).
110 */ 110 */
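To make the interface described in the comment above concrete, here is a small hypothetical usage sketch (example_usage is an invented name; the item pointer must be at least 4-byte aligned, as required by the 2-LSB tag encoding):

static void
example_usage(void *item)
{
	struct radix_tree t;
	void *results[16];
	unsigned int n;

	radix_tree_init_tree(&t);
	if (radix_tree_insert_node(&t, 100, item) == 0) {	/* else ENOMEM */
		KASSERT(radix_tree_lookup_node(&t, 100) == item);
		radix_tree_set_tag(&t, 100, 1 << 0);	/* tag id 0 */
		n = radix_tree_gang_lookup_tagged_node(&t, 0, results, 16,
		    false, 1 << 0);
		KASSERT(n == 1 && results[0] == item);
		KASSERT(radix_tree_remove_node(&t, 100) == item);
	}
	radix_tree_fini_tree(&t);
}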
111 111
112#include <sys/cdefs.h> 112#include <sys/cdefs.h>
113 113
114#if defined(_KERNEL) || defined(_STANDALONE) 114#if defined(_KERNEL) || defined(_STANDALONE)
115__KERNEL_RCSID(0, "$NetBSD: radixtree.c,v 1.21 2020/01/12 20:00:41 para Exp $"); 115__KERNEL_RCSID(0, "$NetBSD: radixtree.c,v 1.22 2020/01/28 16:33:34 ad Exp $");
116#include <sys/param.h> 116#include <sys/param.h>
117#include <sys/errno.h> 117#include <sys/errno.h>
118#include <sys/pool.h> 118#include <sys/pool.h>
119#include <sys/radixtree.h> 119#include <sys/radixtree.h>
120#include <lib/libkern/libkern.h> 120#include <lib/libkern/libkern.h>
121#if defined(_STANDALONE) 121#if defined(_STANDALONE)
122#include <lib/libsa/stand.h> 122#include <lib/libsa/stand.h>
123#endif /* defined(_STANDALONE) */ 123#endif /* defined(_STANDALONE) */
124#else /* defined(_KERNEL) || defined(_STANDALONE) */ 124#else /* defined(_KERNEL) || defined(_STANDALONE) */
125__RCSID("$NetBSD: radixtree.c,v 1.21 2020/01/12 20:00:41 para Exp $"); 125__RCSID("$NetBSD: radixtree.c,v 1.22 2020/01/28 16:33:34 ad Exp $");
126#include <assert.h> 126#include <assert.h>
127#include <errno.h> 127#include <errno.h>
128#include <stdbool.h> 128#include <stdbool.h>
129#include <stdlib.h> 129#include <stdlib.h>
130#include <string.h> 130#include <string.h>
131#if 1 131#if 1
132#define KASSERT assert 132#define KASSERT assert
133#else 133#else
134#define KASSERT(a) /* nothing */ 134#define KASSERT(a) /* nothing */
135#endif 135#endif
136#endif /* defined(_KERNEL) || defined(_STANDALONE) */ 136#endif /* defined(_KERNEL) || defined(_STANDALONE) */
137 137
138#include <sys/radixtree.h> 138#include <sys/radixtree.h>
139 139
140#define RADIX_TREE_BITS_PER_HEIGHT 4 /* XXX tune */ 140#define RADIX_TREE_BITS_PER_HEIGHT 4 /* XXX tune */
141#define RADIX_TREE_PTR_PER_NODE (1 << RADIX_TREE_BITS_PER_HEIGHT) 141#define RADIX_TREE_PTR_PER_NODE (1 << RADIX_TREE_BITS_PER_HEIGHT)
142#define RADIX_TREE_MAX_HEIGHT (64 / RADIX_TREE_BITS_PER_HEIGHT) 142#define RADIX_TREE_MAX_HEIGHT (64 / RADIX_TREE_BITS_PER_HEIGHT)
143#define RADIX_TREE_INVALID_HEIGHT (RADIX_TREE_MAX_HEIGHT + 1) 143#define RADIX_TREE_INVALID_HEIGHT (RADIX_TREE_MAX_HEIGHT + 1)
144__CTASSERT((64 % RADIX_TREE_BITS_PER_HEIGHT) == 0); 144__CTASSERT((64 % RADIX_TREE_BITS_PER_HEIGHT) == 0);
145 145
146__CTASSERT(((1 << RADIX_TREE_TAG_ID_MAX) & (sizeof(int) - 1)) == 0); 146__CTASSERT(((1 << RADIX_TREE_TAG_ID_MAX) & (sizeof(int) - 1)) == 0);
147#define RADIX_TREE_TAG_MASK ((1 << RADIX_TREE_TAG_ID_MAX) - 1) 147#define RADIX_TREE_TAG_MASK ((1 << RADIX_TREE_TAG_ID_MAX) - 1)
148 148
149static inline void * 149static inline void *
150entry_ptr(void *p) 150entry_ptr(void *p)
151{ 151{
152 152
153 return (void *)((uintptr_t)p & ~RADIX_TREE_TAG_MASK); 153 return (void *)((uintptr_t)p & ~RADIX_TREE_TAG_MASK);
154} 154}
155 155
156static inline unsigned int 156static inline unsigned int
157entry_tagmask(void *p) 157entry_tagmask(void *p)
158{ 158{
159 159
160 return (uintptr_t)p & RADIX_TREE_TAG_MASK; 160 return (uintptr_t)p & RADIX_TREE_TAG_MASK;
161} 161}
162 162
163static inline void * 163static inline void *
164entry_compose(void *p, unsigned int tagmask) 164entry_compose(void *p, unsigned int tagmask)
165{ 165{
166 166
167 return (void *)((uintptr_t)p | tagmask); 167 return (void *)((uintptr_t)p | tagmask);
168} 168}
169 169
170static inline bool 170static inline bool
171entry_match_p(void *p, unsigned int tagmask) 171entry_match_p(void *p, unsigned int tagmask)
172{ 172{
173 173
174 KASSERT(entry_ptr(p) != NULL || entry_tagmask(p) == 0); 174 KASSERT(entry_ptr(p) != NULL || entry_tagmask(p) == 0);
175 if (p == NULL) { 175 if (p == NULL) {
176 return false; 176 return false;
177 } 177 }
178 if (tagmask == 0) { 178 if (tagmask == 0) {
179 return true; 179 return true;
180 } 180 }
181 return (entry_tagmask(p) & tagmask) != 0; 181 return (entry_tagmask(p) & tagmask) != 0;
182} 182}
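As a concrete illustration of the 2-LSB tag encoding handled by the helpers above (a hypothetical snippet, not part of the file): any leaf pointer with at least 4-byte alignment leaves the two low bits free, so a tagmask can travel in the same word as the pointer.

static void
example_entry_encoding(void)
{
	static uint64_t leaf;			/* any >= 4-byte aligned object */
	void *e = entry_compose(&leaf, 0x1);	/* attach tag id 0 */

	KASSERT(entry_ptr(e) == &leaf);		/* pointer recovered intact */
	KASSERT(entry_tagmask(e) == 0x1);	/* tag bits recovered */
	KASSERT(!entry_match_p(e, 0x2));	/* tag id 1 is not set */
}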
183 183
184/* 184/*
185 * radix_tree_node: an intermediate node 185 * radix_tree_node: an intermediate node
186 * 186 *
187 * we don't care about the type of leaf nodes. they are just void *. 187 * we don't care about the type of leaf nodes. they are just void *.
188 * 188 *
189 * we used to maintain a count of non-NULL nodes in this structure, but it 189 * we used to maintain a count of non-NULL nodes in this structure, but it
190 * prevented it from being aligned to a cache line boundary; the performance 190 * prevented it from being aligned to a cache line boundary; the performance
191 * benefit from being cache friendly is greater than the benefit of having 191 * benefit from being cache friendly is greater than the benefit of having
192 * a dedicated count value, especially in multi-processor situations where 192 * a dedicated count value, especially in multi-processor situations where
193 * we need to avoid intra-pool-page false sharing. 193 * we need to avoid intra-pool-page false sharing.
194 */ 194 */
195 195
196struct radix_tree_node { 196struct radix_tree_node {
197 void *n_ptrs[RADIX_TREE_PTR_PER_NODE]; 197 void *n_ptrs[RADIX_TREE_PTR_PER_NODE];
198}; 198};
199 199
200/* 200/*
201 * any_children_tagmask: 201 * any_children_tagmask:
202 * 202 *
203 * return OR'ed tagmask of the given node's children. 203 * return OR'ed tagmask of the given node's children.
204 */ 204 */
205 205
206static unsigned int 206static unsigned int
207any_children_tagmask(const struct radix_tree_node *n) 207any_children_tagmask(const struct radix_tree_node *n)
208{ 208{
209 unsigned int mask; 209 unsigned int mask;
210 int i; 210 int i;
211 211
212 mask = 0; 212 mask = 0;
213 for (i = 0; i < RADIX_TREE_PTR_PER_NODE; i++) { 213 for (i = 0; i < RADIX_TREE_PTR_PER_NODE; i++) {
214 mask |= (unsigned int)(uintptr_t)n->n_ptrs[i]; 214 mask |= (unsigned int)(uintptr_t)n->n_ptrs[i];
215 } 215 }
216 return mask & RADIX_TREE_TAG_MASK; 216 return mask & RADIX_TREE_TAG_MASK;
217} 217}
218 218
219/* 219/*
220 * p_refs[0].pptr == &t->t_root 220 * p_refs[0].pptr == &t->t_root
221 * : 221 * :
222 * p_refs[n].pptr == &(*p_refs[n-1])->n_ptrs[x] 222 * p_refs[n].pptr == &(*p_refs[n-1])->n_ptrs[x]
223 * : 223 * :
224 * : 224 * :
225 * p_refs[t->t_height].pptr == &leaf_pointer 225 * p_refs[t->t_height].pptr == &leaf_pointer
226 */ 226 */
227 227
228struct radix_tree_path { 228struct radix_tree_path {
229 struct radix_tree_node_ref { 229 struct radix_tree_node_ref {
230 void **pptr; 230 void **pptr;
231 } p_refs[RADIX_TREE_MAX_HEIGHT + 1]; /* +1 for the root ptr */ 231 } p_refs[RADIX_TREE_MAX_HEIGHT + 1]; /* +1 for the root ptr */
232 /* 232 /*
233 * p_lastidx is either the index of the last valid element of p_refs[] 233 * p_lastidx is either the index of the last valid element of p_refs[]
234 * or RADIX_TREE_INVALID_HEIGHT. 234 * or RADIX_TREE_INVALID_HEIGHT.
235 * RADIX_TREE_INVALID_HEIGHT means that radix_tree_lookup_ptr found 235 * RADIX_TREE_INVALID_HEIGHT means that radix_tree_lookup_ptr found
236 * that the height of the tree is not enough to cover the given index. 236 * that the height of the tree is not enough to cover the given index.
237 */ 237 */
238 unsigned int p_lastidx; 238 unsigned int p_lastidx;
239}; 239};
240 240
241static inline void ** 241static inline void **
242path_pptr(const struct radix_tree *t, const struct radix_tree_path *p, 242path_pptr(const struct radix_tree *t, const struct radix_tree_path *p,
243 unsigned int height) 243 unsigned int height)
244{ 244{
245 245
246 KASSERT(height <= t->t_height); 246 KASSERT(height <= t->t_height);
247 return p->p_refs[height].pptr; 247 return p->p_refs[height].pptr;
248} 248}
249 249
250static inline struct radix_tree_node * 250static inline struct radix_tree_node *
251path_node(const struct radix_tree * t, const struct radix_tree_path *p, 251path_node(const struct radix_tree * t, const struct radix_tree_path *p,
252 unsigned int height) 252 unsigned int height)
253{ 253{
254 254
255 KASSERT(height <= t->t_height); 255 KASSERT(height <= t->t_height);
256 return entry_ptr(*path_pptr(t, p, height)); 256 return entry_ptr(*path_pptr(t, p, height));
257} 257}
258 258
259/* 259/*
260 * radix_tree_init_tree: 260 * radix_tree_init_tree:
261 * 261 *
262 * Initialize a tree. 262 * Initialize a tree.
263 */ 263 */
264 264
265void 265void
266radix_tree_init_tree(struct radix_tree *t) 266radix_tree_init_tree(struct radix_tree *t)
267{ 267{
268 268
269 t->t_height = 0; 269 t->t_height = 0;
270 t->t_root = NULL; 270 t->t_root = NULL;
271} 271}
272 272
273/* 273/*
274 * radix_tree_fini_tree: 274 * radix_tree_fini_tree:
275 * 275 *
276 * Finish using a tree. 276 * Finish using a tree.
277 */ 277 */
278 278
279void 279void
280radix_tree_fini_tree(struct radix_tree *t) 280radix_tree_fini_tree(struct radix_tree *t)
281{ 281{
282 282
283 KASSERT(t->t_root == NULL); 283 KASSERT(t->t_root == NULL);
284 KASSERT(t->t_height == 0); 284 KASSERT(t->t_height == 0);
285} 285}
286 286
287/* 287/*
288 * radix_tree_empty_tree_p: 288 * radix_tree_empty_tree_p:
289 * 289 *
290 * Return true if the tree is empty, otherwise false. 290 * Return true if the tree is empty, otherwise false.
291 */ 291 */
292 292
293bool 293bool
294radix_tree_empty_tree_p(struct radix_tree *t) 294radix_tree_empty_tree_p(struct radix_tree *t)
295{ 295{
296 296
297 return t->t_root == NULL; 297 return t->t_root == NULL;
298} 298}
299 299
300/* 300/*
301 * radix_tree_empty_tagged_tree_p: 301 * radix_tree_empty_tagged_tree_p:
302 * 302 *
303 * Return true if the tree has no nodes with the given tag. Otherwise 303 * Return true if the tree has no nodes with the given tag. Otherwise
304 * return false. 304 * return false.
305 * 305 *
306 * It's illegal to call this function with tagmask 0. 306 * It's illegal to call this function with tagmask 0.
307 */ 307 */
308 308
309bool 309bool
310radix_tree_empty_tagged_tree_p(struct radix_tree *t, unsigned int tagmask) 310radix_tree_empty_tagged_tree_p(struct radix_tree *t, unsigned int tagmask)
311{ 311{
312 312
313 KASSERT(tagmask != 0); 313 KASSERT(tagmask != 0);
314 return (entry_tagmask(t->t_root) & tagmask) == 0; 314 return (entry_tagmask(t->t_root) & tagmask) == 0;
315} 315}
316 316
317static void 317static void
318radix_tree_node_init(struct radix_tree_node *n) 318radix_tree_node_init(struct radix_tree_node *n)
319{ 319{
320 320
321 memset(n, 0, sizeof(*n)); 321 memset(n, 0, sizeof(*n));
322} 322}
323 323
324#if defined(_KERNEL) 324#if defined(_KERNEL)
325pool_cache_t radix_tree_node_cache __read_mostly; 325pool_cache_t radix_tree_node_cache __read_mostly;
326 326
327static int 327static int
328radix_tree_node_ctor(void *dummy, void *item, int flags) 328radix_tree_node_ctor(void *dummy, void *item, int flags)
329{ 329{
330 struct radix_tree_node *n = item; 330 struct radix_tree_node *n = item;
331 331
332 KASSERT(dummy == NULL); 332 KASSERT(dummy == NULL);
333 radix_tree_node_init(n); 333 radix_tree_node_init(n);
334 return 0; 334 return 0;
335} 335}
336 336
337/* 337/*
338 * radix_tree_init: 338 * radix_tree_init:
339 * 339 *
340 * initialize the subsystem. 340 * initialize the subsystem.
341 */ 341 */
342 342
343void 343void
344radix_tree_init(void) 344radix_tree_init(void)
345{ 345{
346 346
347 radix_tree_node_cache = pool_cache_init(sizeof(struct radix_tree_node), 347 radix_tree_node_cache = pool_cache_init(sizeof(struct radix_tree_node),
348 coherency_unit, 0, PR_LARGECACHE, "radixnode", NULL, IPL_NONE, 348 coherency_unit, 0, PR_LARGECACHE, "radixnode", NULL, IPL_NONE,
349 radix_tree_node_ctor, NULL, NULL); 349 radix_tree_node_ctor, NULL, NULL);
350 KASSERT(radix_tree_node_cache != NULL); 350 KASSERT(radix_tree_node_cache != NULL);
351} 351}
 352
 353/*
 354 * radix_tree_await_memory:
 355 *
 356 * after an insert has failed with ENOMEM, wait for memory to become
 357 * available, so the caller can retry.
 358 */
 359
 360void
 361radix_tree_await_memory(void)
 362{
 363 struct radix_tree_node *n;
 364
 365 n = pool_cache_get(radix_tree_node_cache, PR_WAITOK);
 366 pool_cache_put(radix_tree_node_cache, n);
 367}
 368
352#endif /* defined(_KERNEL) */ 369#endif /* defined(_KERNEL) */
353 370
354static bool __unused 371static bool __unused
355radix_tree_node_clean_p(const struct radix_tree_node *n) 372radix_tree_node_clean_p(const struct radix_tree_node *n)
356{ 373{
357#if RADIX_TREE_PTR_PER_NODE > 16 374#if RADIX_TREE_PTR_PER_NODE > 16
358 unsigned int i; 375 unsigned int i;
359 376
360 for (i = 0; i < RADIX_TREE_PTR_PER_NODE; i++) { 377 for (i = 0; i < RADIX_TREE_PTR_PER_NODE; i++) {
361 if (n->n_ptrs[i] != NULL) { 378 if (n->n_ptrs[i] != NULL) {
362 return false; 379 return false;
363 } 380 }
364 } 381 }
365 return true; 382 return true;
366#else /* RADIX_TREE_PTR_PER_NODE > 16 */ 383#else /* RADIX_TREE_PTR_PER_NODE > 16 */
367 uintptr_t sum; 384 uintptr_t sum;
368 385
369 /* 386 /*
370 * Unrolling the above is much better than a tight loop with two 387 * Unrolling the above is much better than a tight loop with two
371 * test+branch pairs. On x86 with gcc 5.5.0 this compiles into 19 388 * test+branch pairs. On x86 with gcc 5.5.0 this compiles into 19
372 * deterministic instructions including the "return" and prologue & 389 * deterministic instructions including the "return" and prologue &
373 * epilogue. 390 * epilogue.
374 */ 391 */
375 sum = (uintptr_t)n->n_ptrs[0]; 392 sum = (uintptr_t)n->n_ptrs[0];
376 sum |= (uintptr_t)n->n_ptrs[1]; 393 sum |= (uintptr_t)n->n_ptrs[1];
377 sum |= (uintptr_t)n->n_ptrs[2]; 394 sum |= (uintptr_t)n->n_ptrs[2];
378 sum |= (uintptr_t)n->n_ptrs[3]; 395 sum |= (uintptr_t)n->n_ptrs[3];
379#if RADIX_TREE_PTR_PER_NODE > 4 396#if RADIX_TREE_PTR_PER_NODE > 4
380 sum |= (uintptr_t)n->n_ptrs[4]; 397 sum |= (uintptr_t)n->n_ptrs[4];
381 sum |= (uintptr_t)n->n_ptrs[5]; 398 sum |= (uintptr_t)n->n_ptrs[5];
382 sum |= (uintptr_t)n->n_ptrs[6]; 399 sum |= (uintptr_t)n->n_ptrs[6];
383 sum |= (uintptr_t)n->n_ptrs[7]; 400 sum |= (uintptr_t)n->n_ptrs[7];
384#endif 401#endif
385#if RADIX_TREE_PTR_PER_NODE > 8 402#if RADIX_TREE_PTR_PER_NODE > 8
386 sum |= (uintptr_t)n->n_ptrs[8]; 403 sum |= (uintptr_t)n->n_ptrs[8];
387 sum |= (uintptr_t)n->n_ptrs[9]; 404 sum |= (uintptr_t)n->n_ptrs[9];
388 sum |= (uintptr_t)n->n_ptrs[10]; 405 sum |= (uintptr_t)n->n_ptrs[10];
389 sum |= (uintptr_t)n->n_ptrs[11]; 406 sum |= (uintptr_t)n->n_ptrs[11];
390 sum |= (uintptr_t)n->n_ptrs[12]; 407 sum |= (uintptr_t)n->n_ptrs[12];
391 sum |= (uintptr_t)n->n_ptrs[13]; 408 sum |= (uintptr_t)n->n_ptrs[13];
392 sum |= (uintptr_t)n->n_ptrs[14]; 409 sum |= (uintptr_t)n->n_ptrs[14];
393 sum |= (uintptr_t)n->n_ptrs[15]; 410 sum |= (uintptr_t)n->n_ptrs[15];
394#endif 411#endif
395 return sum == 0; 412 return sum == 0;
396#endif /* RADIX_TREE_PTR_PER_NODE > 16 */ 413#endif /* RADIX_TREE_PTR_PER_NODE > 16 */
397} 414}
398 415
399static int __unused 416static int __unused
400radix_tree_node_count_ptrs(const struct radix_tree_node *n) 417radix_tree_node_count_ptrs(const struct radix_tree_node *n)
401{ 418{
402 unsigned int i, c; 419 unsigned int i, c;
403 420
404 for (i = c = 0; i < RADIX_TREE_PTR_PER_NODE; i++) { 421 for (i = c = 0; i < RADIX_TREE_PTR_PER_NODE; i++) {
405 c += (n->n_ptrs[i] != NULL); 422 c += (n->n_ptrs[i] != NULL);
406 } 423 }
407 return c; 424 return c;
408} 425}
409 426
410static struct radix_tree_node * 427static struct radix_tree_node *
411radix_tree_alloc_node(void) 428radix_tree_alloc_node(void)
412{ 429{
413 struct radix_tree_node *n; 430 struct radix_tree_node *n;
414 431
415#if defined(_KERNEL) 432#if defined(_KERNEL)
416 /* 433 /*
417 * note that pool_cache_get can block. 434 * note that pool_cache_get can block.
418 */ 435 */
419 n = pool_cache_get(radix_tree_node_cache, PR_NOWAIT); 436 n = pool_cache_get(radix_tree_node_cache, PR_NOWAIT);
420#else /* defined(_KERNEL) */ 437#else /* defined(_KERNEL) */
421#if defined(_STANDALONE) 438#if defined(_STANDALONE)
422 n = alloc(sizeof(*n)); 439 n = alloc(sizeof(*n));
423#else /* defined(_STANDALONE) */ 440#else /* defined(_STANDALONE) */
424 n = malloc(sizeof(*n)); 441 n = malloc(sizeof(*n));
425#endif /* defined(_STANDALONE) */ 442#endif /* defined(_STANDALONE) */
426 if (n != NULL) { 443 if (n != NULL) {
427 radix_tree_node_init(n); 444 radix_tree_node_init(n);
428 } 445 }
429#endif /* defined(_KERNEL) */ 446#endif /* defined(_KERNEL) */
430 KASSERT(n == NULL || radix_tree_node_clean_p(n)); 447 KASSERT(n == NULL || radix_tree_node_clean_p(n));
431 return n; 448 return n;
432} 449}
433 450
434static void 451static void
435radix_tree_free_node(struct radix_tree_node *n) 452radix_tree_free_node(struct radix_tree_node *n)
436{ 453{
437 454
438 KASSERT(radix_tree_node_clean_p(n)); 455 KASSERT(radix_tree_node_clean_p(n));
439#if defined(_KERNEL) 456#if defined(_KERNEL)
440 pool_cache_put(radix_tree_node_cache, n); 457 pool_cache_put(radix_tree_node_cache, n);
441#elif defined(_STANDALONE) 458#elif defined(_STANDALONE)
442 dealloc(n, sizeof(*n)); 459 dealloc(n, sizeof(*n));
443#else 460#else
444 free(n); 461 free(n);
445#endif 462#endif
446} 463}
447 464
448static int 465static int
449radix_tree_grow(struct radix_tree *t, unsigned int newheight) 466radix_tree_grow(struct radix_tree *t, unsigned int newheight)
450{ 467{
451 const unsigned int tagmask = entry_tagmask(t->t_root); 468 const unsigned int tagmask = entry_tagmask(t->t_root);
452 469
453 KASSERT(newheight <= 64 / RADIX_TREE_BITS_PER_HEIGHT); 470 KASSERT(newheight <= 64 / RADIX_TREE_BITS_PER_HEIGHT);
454 if (t->t_root == NULL) { 471 if (t->t_root == NULL) {
455 t->t_height = newheight; 472 t->t_height = newheight;
456 return 0; 473 return 0;
457 } 474 }
458 while (t->t_height < newheight) { 475 while (t->t_height < newheight) {
459 struct radix_tree_node *n; 476 struct radix_tree_node *n;
460 477
461 n = radix_tree_alloc_node(); 478 n = radix_tree_alloc_node();
462 if (n == NULL) { 479 if (n == NULL) {
463 /* 480 /*
464 * don't bother to revert our changes. 481 * don't bother to revert our changes.
465 * the caller will likely retry. 482 * the caller will likely retry.
466 */ 483 */
467 return ENOMEM; 484 return ENOMEM;
468 } 485 }
469 n->n_ptrs[0] = t->t_root; 486 n->n_ptrs[0] = t->t_root;
470 t->t_root = entry_compose(n, tagmask); 487 t->t_root = entry_compose(n, tagmask);
471 t->t_height++; 488 t->t_height++;
472 } 489 }
473 return 0; 490 return 0;
474} 491}
475 492
476/* 493/*
477 * radix_tree_lookup_ptr: 494 * radix_tree_lookup_ptr:
478 * 495 *
479 * an internal helper function used for various exported functions. 496 * an internal helper function used for various exported functions.
480 * 497 *
481 * return the pointer to store the node for the given index. 498 * return the pointer to store the node for the given index.
482 * 499 *
483 * if alloc is true, try to allocate the storage. (note for _KERNEL: 500 * if alloc is true, try to allocate the storage. (note for _KERNEL:
484 * in that case, this function can block.) if the allocation failed or 501 * in that case, this function can block.) if the allocation failed or
485 * alloc is false, return NULL. 502 * alloc is false, return NULL.
486 * 503 *
487 * if path is not NULL, fill it for the caller's investigation. 504 * if path is not NULL, fill it for the caller's investigation.
488 * 505 *
489 * if tagmask is not zero, search only for nodes with the tag set. 506 * if tagmask is not zero, search only for nodes with the tag set.
490 * note that, however, this function doesn't check the tagmask for the leaf 507 * note that, however, this function doesn't check the tagmask for the leaf
491 * pointer. it's the caller's responsibility to investigate the value 508 * pointer. it's the caller's responsibility to investigate the value
492 * pointed to by the returned pointer if necessary. 509 * pointed to by the returned pointer if necessary.
493 * 510 *
494 * while this function is a bit large, as it's called with some constant 511 * while this function is a bit large, as it's called with some constant
495 * arguments, inlining might have benefits. anyway, a compiler will decide. 512 * arguments, inlining might have benefits. anyway, a compiler will decide.
496 */ 513 */
497 514
498static inline void ** 515static inline void **
499radix_tree_lookup_ptr(struct radix_tree *t, uint64_t idx, 516radix_tree_lookup_ptr(struct radix_tree *t, uint64_t idx,
500 struct radix_tree_path *path, bool alloc, const unsigned int tagmask) 517 struct radix_tree_path *path, bool alloc, const unsigned int tagmask)
501{ 518{
502 struct radix_tree_node *n; 519 struct radix_tree_node *n;
503 int hshift = RADIX_TREE_BITS_PER_HEIGHT * t->t_height; 520 int hshift = RADIX_TREE_BITS_PER_HEIGHT * t->t_height;
504 int shift; 521 int shift;
505 void **vpp; 522 void **vpp;
506 const uint64_t mask = (UINT64_C(1) << RADIX_TREE_BITS_PER_HEIGHT) - 1; 523 const uint64_t mask = (UINT64_C(1) << RADIX_TREE_BITS_PER_HEIGHT) - 1;
507 struct radix_tree_node_ref *refs = NULL; 524 struct radix_tree_node_ref *refs = NULL;
508 525
509 /* 526 /*
510 * check unsupported combinations 527 * check unsupported combinations
511 */ 528 */
512 KASSERT(tagmask == 0 || !alloc); 529 KASSERT(tagmask == 0 || !alloc);
513 KASSERT(path == NULL || !alloc); 530 KASSERT(path == NULL || !alloc);
514 vpp = &t->t_root; 531 vpp = &t->t_root;
515 if (path != NULL) { 532 if (path != NULL) {
516 refs = path->p_refs; 533 refs = path->p_refs;
517 refs->pptr = vpp; 534 refs->pptr = vpp;
518 } 535 }
519 n = NULL; 536 n = NULL;
520 for (shift = 64 - RADIX_TREE_BITS_PER_HEIGHT; shift >= 0;) { 537 for (shift = 64 - RADIX_TREE_BITS_PER_HEIGHT; shift >= 0;) {
521 struct radix_tree_node *c; 538 struct radix_tree_node *c;
522 void *entry; 539 void *entry;
523 const uint64_t i = (idx >> shift) & mask; 540 const uint64_t i = (idx >> shift) & mask;
524 541
525 if (shift >= hshift) { 542 if (shift >= hshift) {
526 unsigned int newheight; 543 unsigned int newheight;
527 544
528 KASSERT(vpp == &t->t_root); 545 KASSERT(vpp == &t->t_root);
529 if (i == 0) { 546 if (i == 0) {
530 shift -= RADIX_TREE_BITS_PER_HEIGHT; 547 shift -= RADIX_TREE_BITS_PER_HEIGHT;
531 continue; 548 continue;
532 } 549 }
533 if (!alloc) { 550 if (!alloc) {
534 if (path != NULL) { 551 if (path != NULL) {
535 KASSERT((refs - path->p_refs) == 0); 552 KASSERT((refs - path->p_refs) == 0);
536 path->p_lastidx = 553 path->p_lastidx =
537 RADIX_TREE_INVALID_HEIGHT; 554 RADIX_TREE_INVALID_HEIGHT;
538 } 555 }
539 return NULL; 556 return NULL;
540 } 557 }
541 newheight = shift / RADIX_TREE_BITS_PER_HEIGHT + 1; 558 newheight = shift / RADIX_TREE_BITS_PER_HEIGHT + 1;
542 if (radix_tree_grow(t, newheight)) { 559 if (radix_tree_grow(t, newheight)) {
543 return NULL; 560 return NULL;
544 } 561 }
545 hshift = RADIX_TREE_BITS_PER_HEIGHT * t->t_height; 562 hshift = RADIX_TREE_BITS_PER_HEIGHT * t->t_height;
546 } 563 }
547 entry = *vpp; 564 entry = *vpp;
548 c = entry_ptr(entry); 565 c = entry_ptr(entry);
549 if (c == NULL || 566 if (c == NULL ||
550 (tagmask != 0 && 567 (tagmask != 0 &&
551 (entry_tagmask(entry) & tagmask) == 0)) { 568 (entry_tagmask(entry) & tagmask) == 0)) {
552 if (!alloc) { 569 if (!alloc) {
553 if (path != NULL) { 570 if (path != NULL) {
554 path->p_lastidx = refs - path->p_refs; 571 path->p_lastidx = refs - path->p_refs;
555 } 572 }
556 return NULL; 573 return NULL;
557 } 574 }
558 c = radix_tree_alloc_node(); 575 c = radix_tree_alloc_node();
559 if (c == NULL) { 576 if (c == NULL) {
560 return NULL; 577 return NULL;
561 } 578 }
562 *vpp = c; 579 *vpp = c;
563 } 580 }
564 n = c; 581 n = c;
565 vpp = &n->n_ptrs[i]; 582 vpp = &n->n_ptrs[i];
566 if (path != NULL) { 583 if (path != NULL) {
567 refs++; 584 refs++;
568 refs->pptr = vpp; 585 refs->pptr = vpp;
569 } 586 }
570 shift -= RADIX_TREE_BITS_PER_HEIGHT; 587 shift -= RADIX_TREE_BITS_PER_HEIGHT;
571 } 588 }
572 if (alloc) { 589 if (alloc) {
573 KASSERT(*vpp == NULL); 590 KASSERT(*vpp == NULL);
574 } 591 }
575 if (path != NULL) { 592 if (path != NULL) {
576 path->p_lastidx = refs - path->p_refs; 593 path->p_lastidx = refs - path->p_refs;
577 } 594 }
578 return vpp; 595 return vpp;
579} 596}
580 597
581/* 598/*
582 * radix_tree_insert_node: 599 * radix_tree_insert_node:
583 * 600 *
584 * Insert the node at the given index. 601 * Insert the node at the given index.
585 * 602 *
586 * It's illegal to insert NULL. It's illegal to insert a non-aligned pointer. 603 * It's illegal to insert NULL. It's illegal to insert a non-aligned pointer.
587 * 604 *
588 * This function returns ENOMEM if necessary memory allocation failed. 605 * This function returns ENOMEM if necessary memory allocation failed.
589 * Otherwise, this function returns 0. 606 * Otherwise, this function returns 0.
590 * 607 *
591 * Note that inserting a node can involve memory allocation for intermediate 608 * Note that inserting a node can involve memory allocation for intermediate
592 * nodes. If _KERNEL, it's done with no-sleep IPL_NONE memory allocation. 609 * nodes. If _KERNEL, it's done with no-sleep IPL_NONE memory allocation.
593 * 610 *
594 * For the newly inserted node, all tags are cleared. 611 * For the newly inserted node, all tags are cleared.
595 */ 612 */
596 613
597int 614int
598radix_tree_insert_node(struct radix_tree *t, uint64_t idx, void *p) 615radix_tree_insert_node(struct radix_tree *t, uint64_t idx, void *p)
599{ 616{
600 void **vpp; 617 void **vpp;
601 618
602 KASSERT(p != NULL); 619 KASSERT(p != NULL);
603 KASSERT(entry_tagmask(entry_compose(p, 0)) == 0); 620 KASSERT(entry_tagmask(entry_compose(p, 0)) == 0);
604 vpp = radix_tree_lookup_ptr(t, idx, NULL, true, 0); 621 vpp = radix_tree_lookup_ptr(t, idx, NULL, true, 0);
605 if (vpp == NULL) { 622 if (vpp == NULL) {
606 return ENOMEM; 623 return ENOMEM;
607 } 624 }
608 KASSERT(*vpp == NULL); 625 KASSERT(*vpp == NULL);
609 *vpp = p; 626 *vpp = p;
610 return 0; 627 return 0;
611} 628}
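One point worth noting from the KASSERT(*vpp == NULL) above: inserting at an index that is already occupied is not allowed; an occupied slot has to be updated with radix_tree_replace_node, described next. A hypothetical fragment (t, idx, newp, oldp and error are illustrative caller-side names):

	if (radix_tree_lookup_node(&t, idx) == NULL)
		error = radix_tree_insert_node(&t, idx, newp);	/* may ENOMEM */
	else
		oldp = radix_tree_replace_node(&t, idx, newp);	/* keeps tags */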
612 629
613/* 630/*
614 * radix_tree_replace_node: 631 * radix_tree_replace_node:
615 * 632 *
616 * Replace a node at the given index with the given node and return the 633 * Replace a node at the given index with the given node and return the
617 * replaced one. 634 * replaced one.
618 * 635 *
619 * It's illegal to try to replace a node which has not been inserted. 636 * It's illegal to try to replace a node which has not been inserted.
620 * 637 *
621 * This function keeps tags intact. 638 * This function keeps tags intact.
622 */ 639 */
623 640
624void * 641void *
625radix_tree_replace_node(struct radix_tree *t, uint64_t idx, void *p) 642radix_tree_replace_node(struct radix_tree *t, uint64_t idx, void *p)
626{ 643{
627 void **vpp; 644 void **vpp;
628 void *oldp; 645 void *oldp;
629 646
630 KASSERT(p != NULL); 647 KASSERT(p != NULL);
631 KASSERT(entry_tagmask(entry_compose(p, 0)) == 0); 648 KASSERT(entry_tagmask(entry_compose(p, 0)) == 0);
632 vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0); 649 vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0);
633 KASSERT(vpp != NULL); 650 KASSERT(vpp != NULL);
634 oldp = *vpp; 651 oldp = *vpp;
635 KASSERT(oldp != NULL); 652 KASSERT(oldp != NULL);
636 *vpp = entry_compose(p, entry_tagmask(*vpp)); 653 *vpp = entry_compose(p, entry_tagmask(*vpp));
637 return entry_ptr(oldp); 654 return entry_ptr(oldp);
638} 655}
639 656
640/* 657/*
641 * radix_tree_remove_node: 658 * radix_tree_remove_node:
642 * 659 *
643 * Remove the node at the given index. 660 * Remove the node at the given index.
644 * 661 *
645 * It's illegal to try to remove a node which has not been inserted. 662 * It's illegal to try to remove a node which has not been inserted.
646 */ 663 */
647 664
648void * 665void *
649radix_tree_remove_node(struct radix_tree *t, uint64_t idx) 666radix_tree_remove_node(struct radix_tree *t, uint64_t idx)
650{ 667{
651 struct radix_tree_path path; 668 struct radix_tree_path path;
652 void **vpp; 669 void **vpp;
653 void *oldp; 670 void *oldp;
654 int i; 671 int i;
655 672
656 vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0); 673 vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0);
657 KASSERT(vpp != NULL); 674 KASSERT(vpp != NULL);
658 oldp = *vpp; 675 oldp = *vpp;
659 KASSERT(oldp != NULL); 676 KASSERT(oldp != NULL);
660 KASSERT(path.p_lastidx == t->t_height); 677 KASSERT(path.p_lastidx == t->t_height);
661 KASSERT(vpp == path_pptr(t, &path, path.p_lastidx)); 678 KASSERT(vpp == path_pptr(t, &path, path.p_lastidx));
662 *vpp = NULL; 679 *vpp = NULL;
663 for (i = t->t_height - 1; i >= 0; i--) { 680 for (i = t->t_height - 1; i >= 0; i--) {
664 void *entry; 681 void *entry;
665 struct radix_tree_node ** const pptr = 682 struct radix_tree_node ** const pptr =
666 (struct radix_tree_node **)path_pptr(t, &path, i); 683 (struct radix_tree_node **)path_pptr(t, &path, i);
667 struct radix_tree_node *n; 684 struct radix_tree_node *n;
668 685
669 KASSERT(pptr != NULL); 686 KASSERT(pptr != NULL);
670 entry = *pptr; 687 entry = *pptr;
671 n = entry_ptr(entry); 688 n = entry_ptr(entry);
672 KASSERT(n != NULL); 689 KASSERT(n != NULL);
673 if (!radix_tree_node_clean_p(n)) { 690 if (!radix_tree_node_clean_p(n)) {
674 break; 691 break;
675 } 692 }
676 radix_tree_free_node(n); 693 radix_tree_free_node(n);
677 *pptr = NULL; 694 *pptr = NULL;
678 } 695 }
679 /* 696 /*
680 * fix up height 697 * fix up height
681 */ 698 */
682 if (i < 0) { 699 if (i < 0) {
683 KASSERT(t->t_root == NULL); 700 KASSERT(t->t_root == NULL);
684 t->t_height = 0; 701 t->t_height = 0;
685 } 702 }
686 /* 703 /*
687 * update tags 704 * update tags
688 */ 705 */
689 for (; i >= 0; i--) { 706 for (; i >= 0; i--) {
690 void *entry; 707 void *entry;
691 struct radix_tree_node ** const pptr = 708 struct radix_tree_node ** const pptr =
692 (struct radix_tree_node **)path_pptr(t, &path, i); 709 (struct radix_tree_node **)path_pptr(t, &path, i);
693 struct radix_tree_node *n; 710 struct radix_tree_node *n;
694 unsigned int newmask; 711 unsigned int newmask;
695 712
696 KASSERT(pptr != NULL); 713 KASSERT(pptr != NULL);
697 entry = *pptr; 714 entry = *pptr;
698 n = entry_ptr(entry); 715 n = entry_ptr(entry);
699 KASSERT(n != NULL); 716 KASSERT(n != NULL);
700 KASSERT(!radix_tree_node_clean_p(n)); 717 KASSERT(!radix_tree_node_clean_p(n));
701 newmask = any_children_tagmask(n); 718 newmask = any_children_tagmask(n);
702 if (newmask == entry_tagmask(entry)) { 719 if (newmask == entry_tagmask(entry)) {
703 break; 720 break;
704 } 721 }
705 *pptr = entry_compose(n, newmask); 722 *pptr = entry_compose(n, newmask);
706 } 723 }
707 /* 724 /*
708 * XXX is it worth to try to reduce height? 725 * XXX is it worth to try to reduce height?
709 * if we do that, make radix_tree_grow rollback its change as well. 726 * if we do that, make radix_tree_grow rollback its change as well.
710 */ 727 */
711 return entry_ptr(oldp); 728 return entry_ptr(oldp);
712} 729}
713 730
714/* 731/*
715 * radix_tree_lookup_node: 732 * radix_tree_lookup_node:
716 * 733 *
717 * Returns the node at the given index. 734 * Returns the node at the given index.
718 * Returns NULL if nothing is found at the given index. 735 * Returns NULL if nothing is found at the given index.
719 */ 736 */
720 737
721void * 738void *
722radix_tree_lookup_node(struct radix_tree *t, uint64_t idx) 739radix_tree_lookup_node(struct radix_tree *t, uint64_t idx)
723{ 740{
724 void **vpp; 741 void **vpp;
725 742
726 vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0); 743 vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0);
727 if (vpp == NULL) { 744 if (vpp == NULL) {
728 return NULL; 745 return NULL;
729 } 746 }
730 return entry_ptr(*vpp); 747 return entry_ptr(*vpp);
731} 748}
732 749
733static inline void 750static inline void
734gang_lookup_init(struct radix_tree *t, uint64_t idx, 751gang_lookup_init(struct radix_tree *t, uint64_t idx,
735 struct radix_tree_path *path, const unsigned int tagmask) 752 struct radix_tree_path *path, const unsigned int tagmask)
736{ 753{
737 void **vpp __unused; 754 void **vpp __unused;
738 755
739 vpp = radix_tree_lookup_ptr(t, idx, path, false, tagmask); 756 vpp = radix_tree_lookup_ptr(t, idx, path, false, tagmask);
740 KASSERT(vpp == NULL || 757 KASSERT(vpp == NULL ||
741 vpp == path_pptr(t, path, path->p_lastidx)); 758 vpp == path_pptr(t, path, path->p_lastidx));
742 KASSERT(&t->t_root == path_pptr(t, path, 0)); 759 KASSERT(&t->t_root == path_pptr(t, path, 0));
743 KASSERT(path->p_lastidx == RADIX_TREE_INVALID_HEIGHT || 760 KASSERT(path->p_lastidx == RADIX_TREE_INVALID_HEIGHT ||
744 path->p_lastidx == t->t_height || 761 path->p_lastidx == t->t_height ||
745 !entry_match_p(*path_pptr(t, path, path->p_lastidx), tagmask)); 762 !entry_match_p(*path_pptr(t, path, path->p_lastidx), tagmask));
746} 763}
747 764
748/* 765/*
749 * gang_lookup_scan: 766 * gang_lookup_scan:
750 * 767 *
751 * a helper routine for radix_tree_gang_lookup_node and its variants. 768 * a helper routine for radix_tree_gang_lookup_node and its variants.
752 */ 769 */
753 770
754static inline unsigned int 771static inline unsigned int
755__attribute__((__always_inline__)) 772__attribute__((__always_inline__))
756gang_lookup_scan(struct radix_tree *t, struct radix_tree_path *path, 773gang_lookup_scan(struct radix_tree *t, struct radix_tree_path *path,
757 void **results, const unsigned int maxresults, const unsigned int tagmask, 774 void **results, const unsigned int maxresults, const unsigned int tagmask,
758 const bool reverse, const bool dense) 775 const bool reverse, const bool dense)
759{ 776{
760 777
761 /* 778 /*
762 * we keep the path updated only for lastidx-1. 779 * we keep the path updated only for lastidx-1.
763 * vpp is what path_pptr(t, path, lastidx) would be. 780 * vpp is what path_pptr(t, path, lastidx) would be.
764 */ 781 */
765 void **vpp; 782 void **vpp;
766 unsigned int nfound; 783 unsigned int nfound;
767 unsigned int lastidx; 784 unsigned int lastidx;
768 /* 785 /*
769 * set up scan direction dependent constants so that we can iterate 786 * set up scan direction dependent constants so that we can iterate
770 * over n_ptrs as follows. 787 * over n_ptrs as follows.
771 * 788 *
772 * for (i = first; i != guard; i += step) 789 * for (i = first; i != guard; i += step)
773 * visit n->n_ptrs[i]; 790 * visit n->n_ptrs[i];
774 */ 791 */
775 const int step = reverse ? -1 : 1; 792 const int step = reverse ? -1 : 1;
776 const unsigned int first = reverse ? RADIX_TREE_PTR_PER_NODE - 1 : 0; 793 const unsigned int first = reverse ? RADIX_TREE_PTR_PER_NODE - 1 : 0;
777 const unsigned int last = reverse ? 0 : RADIX_TREE_PTR_PER_NODE - 1; 794 const unsigned int last = reverse ? 0 : RADIX_TREE_PTR_PER_NODE - 1;
778 const unsigned int guard = last + step; 795 const unsigned int guard = last + step;
779 796
780 KASSERT(maxresults > 0); 797 KASSERT(maxresults > 0);
781 KASSERT(&t->t_root == path_pptr(t, path, 0)); 798 KASSERT(&t->t_root == path_pptr(t, path, 0));
782 lastidx = path->p_lastidx; 799 lastidx = path->p_lastidx;
783 KASSERT(lastidx == RADIX_TREE_INVALID_HEIGHT || 800 KASSERT(lastidx == RADIX_TREE_INVALID_HEIGHT ||
784 lastidx == t->t_height || 801 lastidx == t->t_height ||
785 !entry_match_p(*path_pptr(t, path, lastidx), tagmask)); 802 !entry_match_p(*path_pptr(t, path, lastidx), tagmask));
786 nfound = 0; 803 nfound = 0;
787 if (lastidx == RADIX_TREE_INVALID_HEIGHT) { 804 if (lastidx == RADIX_TREE_INVALID_HEIGHT) {
788 /* 805 /*
789 * requested idx is beyond the right-most node. 806 * requested idx is beyond the right-most node.
790 */ 807 */
791 if (reverse && !dense) { 808 if (reverse && !dense) {
792 lastidx = 0; 809 lastidx = 0;
793 vpp = path_pptr(t, path, lastidx); 810 vpp = path_pptr(t, path, lastidx);
794 goto descend; 811 goto descend;
795 } 812 }
796 return 0; 813 return 0;
797 } 814 }
798 vpp = path_pptr(t, path, lastidx); 815 vpp = path_pptr(t, path, lastidx);
799 while (/*CONSTCOND*/true) { 816 while (/*CONSTCOND*/true) {
800 struct radix_tree_node *n; 817 struct radix_tree_node *n;
801 unsigned int i; 818 unsigned int i;
802 819
803 if (entry_match_p(*vpp, tagmask)) { 820 if (entry_match_p(*vpp, tagmask)) {
804 KASSERT(lastidx == t->t_height); 821 KASSERT(lastidx == t->t_height);
805 /* 822 /*
806 * record the matching non-NULL leaf. 823 * record the matching non-NULL leaf.
807 */ 824 */
808 results[nfound] = entry_ptr(*vpp); 825 results[nfound] = entry_ptr(*vpp);
809 nfound++; 826 nfound++;
810 if (nfound == maxresults) { 827 if (nfound == maxresults) {
811 return nfound; 828 return nfound;
812 } 829 }
813 } else if (dense) { 830 } else if (dense) {
814 return nfound; 831 return nfound;
815 } 832 }
816scan_siblings: 833scan_siblings:
817 /* 834 /*
818 * try to find the next matching non-NULL sibling. 835 * try to find the next matching non-NULL sibling.
819 */ 836 */
820 if (lastidx == 0) { 837 if (lastidx == 0) {
821 /* 838 /*
822 * the root has no siblings. 839 * the root has no siblings.
823 * we're done. 840 * we're done.
824 */ 841 */
825 KASSERT(vpp == &t->t_root); 842 KASSERT(vpp == &t->t_root);
826 break; 843 break;
827 } 844 }
828 n = path_node(t, path, lastidx - 1); 845 n = path_node(t, path, lastidx - 1);
829 /* 846 /*
830 * we used to have an integer counter in the node, and this 847 * we used to have an integer counter in the node, and this
831 * optimization made sense then, even though marginal. it 848 * optimization made sense then, even though marginal. it
832 * no longer provides benefit with the structure cache line 849 * no longer provides benefit with the structure cache line
833 * aligned and the counter replaced by an unrolled sequence 850 * aligned and the counter replaced by an unrolled sequence
834 * testing the pointers in batch. 851 * testing the pointers in batch.
835 */ 852 */
836#if 0 853#if 0
837 if (*vpp != NULL && radix_tree_node_count_ptrs(n) == 1) { 854 if (*vpp != NULL && radix_tree_node_count_ptrs(n) == 1) {
838 /* 855 /*
839 * optimization; if the node has only a single pointer 856 * optimization; if the node has only a single pointer
840 * and we've already visited it, there's no point to 857 * and we've already visited it, there's no point to
841 * keep scanning in this node. 858 * keep scanning in this node.
842 */ 859 */
843 goto no_siblings; 860 goto no_siblings;
844 } 861 }
845#endif /* 0 */ 862#endif /* 0 */
846 for (i = vpp - n->n_ptrs + step; i != guard; i += step) { 863 for (i = vpp - n->n_ptrs + step; i != guard; i += step) {
847 KASSERT(i < RADIX_TREE_PTR_PER_NODE); 864 KASSERT(i < RADIX_TREE_PTR_PER_NODE);
848 if (entry_match_p(n->n_ptrs[i], tagmask)) { 865 if (entry_match_p(n->n_ptrs[i], tagmask)) {
849 vpp = &n->n_ptrs[i]; 866 vpp = &n->n_ptrs[i];
850 break; 867 break;
851 } 868 }
852 } 869 }
853 if (i == guard) { 870 if (i == guard) {
854#if 0 871#if 0
855no_siblings: 872no_siblings:
856#endif /* 0 */ 873#endif /* 0 */
857 /* 874 /*
858 * not found. go to parent. 875 * not found. go to parent.
859 */ 876 */
860 lastidx--; 877 lastidx--;
861 vpp = path_pptr(t, path, lastidx); 878 vpp = path_pptr(t, path, lastidx);
862 goto scan_siblings; 879 goto scan_siblings;
863 } 880 }
864descend: 881descend:
865 /* 882 /*
866 * following the left-most (or right-most in the case of 883 * following the left-most (or right-most in the case of
867 * reverse scan) child node, descend until reaching the leaf or 884 * reverse scan) child node, descend until reaching the leaf or
868 * a non-matching entry. 885 * a non-matching entry.
869 */ 886 */
870 while (entry_match_p(*vpp, tagmask) && lastidx < t->t_height) { 887 while (entry_match_p(*vpp, tagmask) && lastidx < t->t_height) {
871 /* 888 /*
872 * save vpp in the path so that we can come back to this 889 * save vpp in the path so that we can come back to this
873 * node after finishing visiting children. 890 * node after finishing visiting children.
874 */ 891 */
875 path->p_refs[lastidx].pptr = vpp; 892 path->p_refs[lastidx].pptr = vpp;
876 n = entry_ptr(*vpp); 893 n = entry_ptr(*vpp);
877 vpp = &n->n_ptrs[first]; 894 vpp = &n->n_ptrs[first];
878 lastidx++; 895 lastidx++;
879 } 896 }
880 } 897 }
881 return nfound; 898 return nfound;
882} 899}
883 900
884/* 901/*
885 * radix_tree_gang_lookup_node: 902 * radix_tree_gang_lookup_node:
886 * 903 *
887 * Scan the tree starting from the given index in ascending order and 904 * Scan the tree starting from the given index in ascending order and
888 * return found nodes. 905 * return found nodes.
889 * 906 *
890 * results should be an array large enough to hold maxresults pointers. 907 * results should be an array large enough to hold maxresults pointers.
891 * This function returns the number of nodes found, up to maxresults. 908 * This function returns the number of nodes found, up to maxresults.
892 * Returning less than maxresults means there are no more nodes in the tree. 909 * Returning less than maxresults means there are no more nodes in the tree.
893 * 910 *
894 * If dense == true, this function stops scanning when it finds a hole in the 911 * If dense == true, this function stops scanning when it finds a hole in the
895 * indexes, i.e. an index for which radix_tree_lookup_node would return NULL. 912 * indexes, i.e. an index for which radix_tree_lookup_node would return NULL.
896 * If dense == false, this function skips holes and continues scanning until 913 * If dense == false, this function skips holes and continues scanning until
897 * maxresults nodes are found or it reaches the limit of the index range. 914 * maxresults nodes are found or it reaches the limit of the index range.
898 * 915 *
899 * The result of this function is semantically equivalent to what could be 916 * The result of this function is semantically equivalent to what could be
900 * obtained by repeated calls of radix_tree_lookup_node with increasing index, 917 * obtained by repeated calls of radix_tree_lookup_node with increasing index,
901 * but this function is expected to be computationally cheaper when looking up 918 * but this function is expected to be computationally cheaper when looking up
902 * multiple nodes at once. Especially, it's expected to be much cheaper when 919 * multiple nodes at once. Especially, it's expected to be much cheaper when
903 * node indexes are distributed sparsely. 920 * node indexes are distributed sparsely.
904 * 921 *
905 * Note that this function doesn't return index values of found nodes. 922 * Note that this function doesn't return index values of found nodes.
906 * Thus, in the case of dense == false, if index values are important for 923 * Thus, in the case of dense == false, if index values are important for
907 * a caller, it's the caller's responsibility to check them, typically 924 * a caller, it's the caller's responsibility to check them, typically
908 * by examining the returned nodes using some caller-specific knowledge 925 * by examining the returned nodes using some caller-specific knowledge
909 * about them. 926 * about them.
910 * In the case of dense == true, a node returned via results[N] is always for 927 * In the case of dense == true, a node returned via results[N] is always for
911 * the index (idx + N). 928 * the index (idx + N).
912 */ 929 */
913 930
914unsigned int 931unsigned int
915radix_tree_gang_lookup_node(struct radix_tree *t, uint64_t idx, 932radix_tree_gang_lookup_node(struct radix_tree *t, uint64_t idx,
916 void **results, unsigned int maxresults, bool dense) 933 void **results, unsigned int maxresults, bool dense)
917{ 934{
918 struct radix_tree_path path; 935 struct radix_tree_path path;
919 936
920 gang_lookup_init(t, idx, &path, 0); 937 gang_lookup_init(t, idx, &path, 0);
921 return gang_lookup_scan(t, &path, results, maxresults, 0, false, dense); 938 return gang_lookup_scan(t, &path, results, maxresults, 0, false, dense);
922} 939}
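A hypothetical calling pattern for the non-dense scan just defined: visit every leaf in ascending index order in fixed-size batches. Because indexes are not returned, this sketch assumes the leaf structure records its own index in an i_idx field (struct my_item and process_item are invented names):

	struct my_item *batch[16];
	uint64_t nextidx = 0;
	unsigned int i, n;

	do {
		n = radix_tree_gang_lookup_node(&t, nextidx, (void **)batch,
		    16, false);
		for (i = 0; i < n; i++)
			process_item(batch[i]);
		if (n > 0)
			nextidx = batch[n - 1]->i_idx + 1;
	} while (n == 16);	/* fewer than 16 results: no more leaves */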
923 940
924/* 941/*
925 * radix_tree_gang_lookup_node_reverse: 942 * radix_tree_gang_lookup_node_reverse:
926 * 943 *
927 * Same as radix_tree_gang_lookup_node except that this one scans the 944 * Same as radix_tree_gang_lookup_node except that this one scans the
928 * tree in the reverse order, i.e. in descending index order. 945 * tree in the reverse order, i.e. in descending index order.
929 */ 946 */
930 947
931unsigned int 948unsigned int
932radix_tree_gang_lookup_node_reverse(struct radix_tree *t, uint64_t idx, 949radix_tree_gang_lookup_node_reverse(struct radix_tree *t, uint64_t idx,
933 void **results, unsigned int maxresults, bool dense) 950 void **results, unsigned int maxresults, bool dense)
934{ 951{
935 struct radix_tree_path path; 952 struct radix_tree_path path;
936 953
937 gang_lookup_init(t, idx, &path, 0); 954 gang_lookup_init(t, idx, &path, 0);
938 return gang_lookup_scan(t, &path, results, maxresults, 0, true, dense); 955 return gang_lookup_scan(t, &path, results, maxresults, 0, true, dense);
939} 956}
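For example (a hypothetical use), a reverse scan starting from UINT64_MAX with maxresults 1 yields the leaf stored at the greatest index, if the tree has any:

	void *last;

	if (radix_tree_gang_lookup_node_reverse(&t, UINT64_MAX, &last, 1,
	    false) == 1) {
		/* 'last' is the leaf with the largest index in the tree */
	}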
940 957
941/* 958/*
942 * radix_tree_gang_lookup_tagged_node: 959 * radix_tree_gang_lookup_tagged_node:
943 * 960 *
944 * Same as radix_tree_gang_lookup_node except that this one only returns 961 * Same as radix_tree_gang_lookup_node except that this one only returns
945 * nodes tagged with tagid. 962 * nodes tagged with tagid.
946 * 963 *
947 * It's illegal to call this function with tagmask 0. 964 * It's illegal to call this function with tagmask 0.
948 */ 965 */
949 966
950unsigned int 967unsigned int
951radix_tree_gang_lookup_tagged_node(struct radix_tree *t, uint64_t idx, 968radix_tree_gang_lookup_tagged_node(struct radix_tree *t, uint64_t idx,
952 void **results, unsigned int maxresults, bool dense, unsigned int tagmask) 969 void **results, unsigned int maxresults, bool dense, unsigned int tagmask)
953{ 970{
954 struct radix_tree_path path; 971 struct radix_tree_path path;
955 972
956 KASSERT(tagmask != 0); 973 KASSERT(tagmask != 0);
957 gang_lookup_init(t, idx, &path, tagmask); 974 gang_lookup_init(t, idx, &path, tagmask);
958 return gang_lookup_scan(t, &path, results, maxresults, tagmask, false, 975 return gang_lookup_scan(t, &path, results, maxresults, tagmask, false,
959 dense); 976 dense);
960} 977}
961 978
962/* 979/*
963 * radix_tree_gang_lookup_tagged_node_reverse: 980 * radix_tree_gang_lookup_tagged_node_reverse:
964 * 981 *
965 * Same as radix_tree_gang_lookup_tagged_node except that this one scans the 982 * Same as radix_tree_gang_lookup_tagged_node except that this one scans the
966 * tree in the reverse order. I.e. descending index values. 983 * tree in the reverse order. I.e. descending index values.
967 */ 984 */
968 985
969unsigned int 986unsigned int
970radix_tree_gang_lookup_tagged_node_reverse(struct radix_tree *t, uint64_t idx, 987radix_tree_gang_lookup_tagged_node_reverse(struct radix_tree *t, uint64_t idx,
971 void **results, unsigned int maxresults, bool dense, unsigned int tagmask) 988 void **results, unsigned int maxresults, bool dense, unsigned int tagmask)
972{ 989{
973 struct radix_tree_path path; 990 struct radix_tree_path path;
974 991
975 KASSERT(tagmask != 0); 992 KASSERT(tagmask != 0);
976 gang_lookup_init(t, idx, &path, tagmask); 993 gang_lookup_init(t, idx, &path, tagmask);
977 return gang_lookup_scan(t, &path, results, maxresults, tagmask, true, 994 return gang_lookup_scan(t, &path, results, maxresults, tagmask, true,
978 dense); 995 dense);
979} 996}
980 997
981/* 998/*
982 * radix_tree_get_tag: 999 * radix_tree_get_tag:
983 * 1000 *
984 * Return the tagmask for the node at the given index. 1001 * Return the tagmask for the node at the given index.
985 * 1002 *
986 * It's illegal to call this function for a node which has not been inserted. 1003 * It's illegal to call this function for a node which has not been inserted.
987 */ 1004 */
988 1005
989unsigned int 1006unsigned int
990radix_tree_get_tag(struct radix_tree *t, uint64_t idx, unsigned int tagmask) 1007radix_tree_get_tag(struct radix_tree *t, uint64_t idx, unsigned int tagmask)
991{ 1008{
992 /* 1009 /*
993 * the following two implementations should behave the same. 1010 * the following two implementations should behave the same.
994 * the former one was chosen because it seems faster. 1011 * the former one was chosen because it seems faster.
995 */ 1012 */
996#if 1 1013#if 1
997 void **vpp; 1014 void **vpp;
998 1015
999 vpp = radix_tree_lookup_ptr(t, idx, NULL, false, tagmask); 1016 vpp = radix_tree_lookup_ptr(t, idx, NULL, false, tagmask);
1000 if (vpp == NULL) { 1017 if (vpp == NULL) {
1001 return 0; 1018 return 0;
1002 } 1019 }
1003 KASSERT(*vpp != NULL); 1020 KASSERT(*vpp != NULL);
1004 return (entry_tagmask(*vpp) & tagmask); 1021 return (entry_tagmask(*vpp) & tagmask);
1005#else 1022#else
1006 void **vpp; 1023 void **vpp;
1007 1024
1008 vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0); 1025 vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0);
1009 KASSERT(vpp != NULL); 1026 KASSERT(vpp != NULL);
1010 return (entry_tagmask(*vpp) & tagmask); 1027 return (entry_tagmask(*vpp) & tagmask);
1011#endif 1028#endif
1012} 1029}
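
Because the return value is the entry's tags masked with the supplied
tagmask, a caller can probe several tags in one call; a sketch, assuming
the entry at index 1000 has been inserted:

	radix_tree_tagmask_t m;

	m = radix_tree_get_tag(t, 1000, 2 | 1);
	if ((m & 2) != 0) {
		/* tag 2 is set on the entry at index 1000 */
	}
	if ((m & 1) != 0) {
		/* tag 1 is set too */
	}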
1013 1030
1014/* 1031/*
1015 * radix_tree_set_tag: 1032 * radix_tree_set_tag:
1016 * 1033 *
1017 * Set the tag for the node at the given index. 1034 * Set the tag for the node at the given index.
1018 * 1035 *
1019 * It's illegal to call this function for a node which has not been inserted. 1036 * It's illegal to call this function for a node which has not been inserted.
1020 * It's illegal to call this function with tagmask 0. 1037 * It's illegal to call this function with tagmask 0.
1021 */ 1038 */
1022 1039
1023void 1040void
1024radix_tree_set_tag(struct radix_tree *t, uint64_t idx, unsigned int tagmask) 1041radix_tree_set_tag(struct radix_tree *t, uint64_t idx, unsigned int tagmask)
1025{ 1042{
1026 struct radix_tree_path path; 1043 struct radix_tree_path path;
1027 void **vpp __unused; 1044 void **vpp __unused;
1028 int i; 1045 int i;
1029 1046
1030 KASSERT(tagmask != 0); 1047 KASSERT(tagmask != 0);
1031 vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0); 1048 vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0);
1032 KASSERT(vpp != NULL); 1049 KASSERT(vpp != NULL);
1033 KASSERT(*vpp != NULL); 1050 KASSERT(*vpp != NULL);
1034 KASSERT(path.p_lastidx == t->t_height); 1051 KASSERT(path.p_lastidx == t->t_height);
1035 KASSERT(vpp == path_pptr(t, &path, path.p_lastidx)); 1052 KASSERT(vpp == path_pptr(t, &path, path.p_lastidx));
1036 for (i = t->t_height; i >= 0; i--) { 1053 for (i = t->t_height; i >= 0; i--) {
1037 void ** const pptr = (void **)path_pptr(t, &path, i); 1054 void ** const pptr = (void **)path_pptr(t, &path, i);
1038 void *entry; 1055 void *entry;
1039 1056
1040 KASSERT(pptr != NULL); 1057 KASSERT(pptr != NULL);
1041 entry = *pptr; 1058 entry = *pptr;
1042 if ((entry_tagmask(entry) & tagmask) != 0) { 1059 if ((entry_tagmask(entry) & tagmask) != 0) {
1043 break; 1060 break;
1044 } 1061 }
1045 *pptr = (void *)((uintptr_t)entry | tagmask); 1062 *pptr = (void *)((uintptr_t)entry | tagmask);
1046 } 1063 }
1047} 1064}
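
Setting a tag also marks every interior node on the path back to the root,
which is what allows the tagged gang lookups above to skip untagged
subtrees; a sketch using assert() in the style of the unit test below:

	radix_tree_set_tag(t, 1000, 2);
	assert(radix_tree_get_tag(t, 1000, 2) != 0);
	assert(!radix_tree_empty_tagged_tree_p(t, 2));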
1048 1065
1049/* 1066/*
1050 * radix_tree_clear_tag: 1067 * radix_tree_clear_tag:
1051 * 1068 *
1052 * Clear the tag for the node at the given index. 1069 * Clear the tag for the node at the given index.
1053 * 1070 *
1054 * It's illegal to call this function for a node which has not been inserted. 1071 * It's illegal to call this function for a node which has not been inserted.
1055 * It's illegal to call this function with tagmask 0. 1072 * It's illegal to call this function with tagmask 0.
1056 */ 1073 */
1057 1074
1058void 1075void
1059radix_tree_clear_tag(struct radix_tree *t, uint64_t idx, unsigned int tagmask) 1076radix_tree_clear_tag(struct radix_tree *t, uint64_t idx, unsigned int tagmask)
1060{ 1077{
1061 struct radix_tree_path path; 1078 struct radix_tree_path path;
1062 void **vpp; 1079 void **vpp;
1063 int i; 1080 int i;
1064 1081
1065 KASSERT(tagmask != 0); 1082 KASSERT(tagmask != 0);
1066 vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0); 1083 vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0);
1067 KASSERT(vpp != NULL); 1084 KASSERT(vpp != NULL);
1068 KASSERT(*vpp != NULL); 1085 KASSERT(*vpp != NULL);
1069 KASSERT(path.p_lastidx == t->t_height); 1086 KASSERT(path.p_lastidx == t->t_height);
1070 KASSERT(vpp == path_pptr(t, &path, path.p_lastidx)); 1087 KASSERT(vpp == path_pptr(t, &path, path.p_lastidx));
1071 /* 1088 /*
1072 * if already cleared, nothing to do 1089 * if already cleared, nothing to do
1073 */ 1090 */
1074 if ((entry_tagmask(*vpp) & tagmask) == 0) { 1091 if ((entry_tagmask(*vpp) & tagmask) == 0) {
1075 return; 1092 return;
1076 } 1093 }
1077 /* 1094 /*
1078 * clear the tag only if no children have the tag. 1095 * clear the tag only if no children have the tag.
1079 */ 1096 */
1080 for (i = t->t_height; i >= 0; i--) { 1097 for (i = t->t_height; i >= 0; i--) {
1081 void ** const pptr = (void **)path_pptr(t, &path, i); 1098 void ** const pptr = (void **)path_pptr(t, &path, i);
1082 void *entry; 1099 void *entry;
1083 1100
1084 KASSERT(pptr != NULL); 1101 KASSERT(pptr != NULL);
1085 entry = *pptr; 1102 entry = *pptr;
1086 KASSERT((entry_tagmask(entry) & tagmask) != 0); 1103 KASSERT((entry_tagmask(entry) & tagmask) != 0);
1087 *pptr = entry_compose(entry_ptr(entry), 1104 *pptr = entry_compose(entry_ptr(entry),
1088 entry_tagmask(entry) & ~tagmask); 1105 entry_tagmask(entry) & ~tagmask);
1089 /* 1106 /*
1090 * check if we should proceed to process the next level. 1107 * check if we should proceed to process the next level.
1091 */ 1108 */
1092 if (0 < i) { 1109 if (0 < i) {
1093 struct radix_tree_node *n = path_node(t, &path, i - 1); 1110 struct radix_tree_node *n = path_node(t, &path, i - 1);
1094 1111
1095 if ((any_children_tagmask(n) & tagmask) != 0) { 1112 if ((any_children_tagmask(n) & tagmask) != 0) {
1096 break; 1113 break;
1097 } 1114 }
1098 } 1115 }
1099 } 1116 }
1100} 1117}
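
Clearing walks the same path from the leaf toward the root, but an interior
node keeps the tag bit as long as any of its other children still carry it;
a sketch, assuming index 1000 was the only entry tagged with bit 2:

	radix_tree_clear_tag(t, 1000, 2);
	assert(radix_tree_get_tag(t, 1000, 2) == 0);
	assert(radix_tree_empty_tagged_tree_p(t, 2));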
1101 1118
1102#if defined(UNITTEST) 1119#if defined(UNITTEST)
1103 1120
1104#include <inttypes.h> 1121#include <inttypes.h>
1105#include <stdio.h> 1122#include <stdio.h>
1106 1123
1107static void 1124static void
1108radix_tree_dump_node(const struct radix_tree *t, void *vp, 1125radix_tree_dump_node(const struct radix_tree *t, void *vp,
1109 uint64_t offset, unsigned int height) 1126 uint64_t offset, unsigned int height)
1110{ 1127{
1111 struct radix_tree_node *n; 1128 struct radix_tree_node *n;
1112 unsigned int i; 1129 unsigned int i;
1113 1130
1114 for (i = 0; i < t->t_height - height; i++) { 1131 for (i = 0; i < t->t_height - height; i++) {
1115 printf(" "); 1132 printf(" ");
1116 } 1133 }
1117 if (entry_tagmask(vp) == 0) { 1134 if (entry_tagmask(vp) == 0) {
1118 printf("[%" PRIu64 "] %p", offset, entry_ptr(vp)); 1135 printf("[%" PRIu64 "] %p", offset, entry_ptr(vp));
1119 } else { 1136 } else {
1120 printf("[%" PRIu64 "] %p (tagmask=0x%x)", offset, entry_ptr(vp), 1137 printf("[%" PRIu64 "] %p (tagmask=0x%x)", offset, entry_ptr(vp),
1121 entry_tagmask(vp)); 1138 entry_tagmask(vp));
1122 } 1139 }
1123 if (height == 0) { 1140 if (height == 0) {
1124 printf(" (leaf)\n"); 1141 printf(" (leaf)\n");
1125 return; 1142 return;
1126 } 1143 }
1127 n = entry_ptr(vp); 1144 n = entry_ptr(vp);
1128 assert(any_children_tagmask(n) == entry_tagmask(vp)); 1145 assert(any_children_tagmask(n) == entry_tagmask(vp));
1129 printf(" (%u children)\n", radix_tree_node_count_ptrs(n)); 1146 printf(" (%u children)\n", radix_tree_node_count_ptrs(n));
1130 for (i = 0; i < __arraycount(n->n_ptrs); i++) { 1147 for (i = 0; i < __arraycount(n->n_ptrs); i++) {
1131 void *c; 1148 void *c;
1132 1149
1133 c = n->n_ptrs[i]; 1150 c = n->n_ptrs[i];
1134 if (c == NULL) { 1151 if (c == NULL) {
1135 continue; 1152 continue;
1136 } 1153 }
1137 radix_tree_dump_node(t, c, 1154 radix_tree_dump_node(t, c,
1138 offset + i * (UINT64_C(1) << 1155 offset + i * (UINT64_C(1) <<
1139 (RADIX_TREE_BITS_PER_HEIGHT * (height - 1))), height - 1); 1156 (RADIX_TREE_BITS_PER_HEIGHT * (height - 1))), height - 1);
1140 } 1157 }
1141} 1158}
1142 1159
1143void radix_tree_dump(const struct radix_tree *); 1160void radix_tree_dump(const struct radix_tree *);
1144 1161
1145void 1162void
1146radix_tree_dump(const struct radix_tree *t) 1163radix_tree_dump(const struct radix_tree *t)
1147{ 1164{
1148 1165
1149 printf("tree %p height=%u\n", t, t->t_height); 1166 printf("tree %p height=%u\n", t, t->t_height);
1150 radix_tree_dump_node(t, t->t_root, 0, t->t_height); 1167 radix_tree_dump_node(t, t->t_root, 0, t->t_height);
1151} 1168}
1152 1169
1153static void 1170static void
1154test1(void) 1171test1(void)
1155{ 1172{
1156 struct radix_tree s; 1173 struct radix_tree s;
1157 struct radix_tree *t = &s; 1174 struct radix_tree *t = &s;
1158 void *results[3]; 1175 void *results[3];
1159 1176
1160 radix_tree_init_tree(t); 1177 radix_tree_init_tree(t);
1161 radix_tree_dump(t); 1178 radix_tree_dump(t);
1162 assert(radix_tree_lookup_node(t, 0) == NULL); 1179 assert(radix_tree_lookup_node(t, 0) == NULL);
1163 assert(radix_tree_lookup_node(t, 1000) == NULL); 1180 assert(radix_tree_lookup_node(t, 1000) == NULL);
1164 assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 0); 1181 assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 0);
1165 assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 0); 1182 assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 0);
1166 assert(radix_tree_gang_lookup_node(t, 1000, results, 3, false) == 0); 1183 assert(radix_tree_gang_lookup_node(t, 1000, results, 3, false) == 0);
1167 assert(radix_tree_gang_lookup_node(t, 1000, results, 3, true) == 0); 1184 assert(radix_tree_gang_lookup_node(t, 1000, results, 3, true) == 0);
1168 assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, false) == 1185 assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, false) ==
1169 0); 1186 0);
1170 assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, true) == 1187 assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, true) ==
1171 0); 1188 0);
1172 assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, false) 1189 assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, false)
1173 == 0); 1190 == 0);
1174 assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, true) 1191 assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, true)
1175 == 0); 1192 == 0);
1176 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, false, 1) 1193 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, false, 1)
1177 == 0); 1194 == 0);
1178 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, true, 1) 1195 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, true, 1)
1179 == 0); 1196 == 0);
1180 assert(radix_tree_gang_lookup_tagged_node(t, 1000, results, 3, false, 1) 1197 assert(radix_tree_gang_lookup_tagged_node(t, 1000, results, 3, false, 1)
1181 == 0); 1198 == 0);
1182 assert(radix_tree_gang_lookup_tagged_node(t, 1000, results, 3, true, 1) 1199 assert(radix_tree_gang_lookup_tagged_node(t, 1000, results, 3, true, 1)
1183 == 0); 1200 == 0);
1184 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3, 1201 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
1185 false, 1) == 0); 1202 false, 1) == 0);
1186 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3, 1203 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
1187 true, 1) == 0); 1204 true, 1) == 0);
1188 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 1000, results, 3, 1205 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 1000, results, 3,
1189 false, 1) == 0); 1206 false, 1) == 0);
1190 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 1000, results, 3, 1207 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 1000, results, 3,
1191 true, 1) == 0); 1208 true, 1) == 0);
1192 assert(radix_tree_empty_tree_p(t)); 1209 assert(radix_tree_empty_tree_p(t));
1193 assert(radix_tree_empty_tagged_tree_p(t, 1)); 1210 assert(radix_tree_empty_tagged_tree_p(t, 1));
1194 assert(radix_tree_empty_tagged_tree_p(t, 2)); 1211 assert(radix_tree_empty_tagged_tree_p(t, 2));
1195 assert(radix_tree_insert_node(t, 0, (void *)0xdeadbea0) == 0); 1212 assert(radix_tree_insert_node(t, 0, (void *)0xdeadbea0) == 0);
1196 assert(!radix_tree_empty_tree_p(t)); 1213 assert(!radix_tree_empty_tree_p(t));
1197 assert(radix_tree_empty_tagged_tree_p(t, 1)); 1214 assert(radix_tree_empty_tagged_tree_p(t, 1));
1198 assert(radix_tree_empty_tagged_tree_p(t, 2)); 1215 assert(radix_tree_empty_tagged_tree_p(t, 2));
1199 assert(radix_tree_lookup_node(t, 0) == (void *)0xdeadbea0); 1216 assert(radix_tree_lookup_node(t, 0) == (void *)0xdeadbea0);
1200 assert(radix_tree_lookup_node(t, 1000) == NULL); 1217 assert(radix_tree_lookup_node(t, 1000) == NULL);
1201 memset(results, 0, sizeof(results)); 1218 memset(results, 0, sizeof(results));
1202 assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 1); 1219 assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 1);
1203 assert(results[0] == (void *)0xdeadbea0); 1220 assert(results[0] == (void *)0xdeadbea0);
1204 memset(results, 0, sizeof(results)); 1221 memset(results, 0, sizeof(results));
1205 assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 1); 1222 assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 1);
1206 assert(results[0] == (void *)0xdeadbea0); 1223 assert(results[0] == (void *)0xdeadbea0);
1207 assert(radix_tree_gang_lookup_node(t, 1000, results, 3, false) == 0); 1224 assert(radix_tree_gang_lookup_node(t, 1000, results, 3, false) == 0);
1208 assert(radix_tree_gang_lookup_node(t, 1000, results, 3, true) == 0); 1225 assert(radix_tree_gang_lookup_node(t, 1000, results, 3, true) == 0);
1209 memset(results, 0, sizeof(results)); 1226 memset(results, 0, sizeof(results));
1210 assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, false) == 1227 assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, false) ==
1211 1); 1228 1);
1212 assert(results[0] == (void *)0xdeadbea0); 1229 assert(results[0] == (void *)0xdeadbea0);
1213 memset(results, 0, sizeof(results)); 1230 memset(results, 0, sizeof(results));
1214 assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, true) == 1231 assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, true) ==
1215 1); 1232 1);
1216 assert(results[0] == (void *)0xdeadbea0); 1233 assert(results[0] == (void *)0xdeadbea0);
1217 memset(results, 0, sizeof(results)); 1234 memset(results, 0, sizeof(results));
1218 assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, false) 1235 assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, false)
1219 == 1); 1236 == 1);
1220 assert(results[0] == (void *)0xdeadbea0); 1237 assert(results[0] == (void *)0xdeadbea0);
1221 assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, true) 1238 assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, true)
1222 == 0); 1239 == 0);
1223 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, false, 1) 1240 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, false, 1)
1224 == 0); 1241 == 0);
1225 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, true, 1) 1242 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, true, 1)
1226 == 0); 1243 == 0);
1227 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3, 1244 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
1228 false, 1) == 0); 1245 false, 1) == 0);
1229 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3, 1246 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
1230 true, 1) == 0); 1247 true, 1) == 0);
1231 assert(radix_tree_insert_node(t, 1000, (void *)0xdeadbea0) == 0); 1248 assert(radix_tree_insert_node(t, 1000, (void *)0xdeadbea0) == 0);
1232 assert(radix_tree_remove_node(t, 0) == (void *)0xdeadbea0); 1249 assert(radix_tree_remove_node(t, 0) == (void *)0xdeadbea0);
1233 assert(!radix_tree_empty_tree_p(t)); 1250 assert(!radix_tree_empty_tree_p(t));
1234 radix_tree_dump(t); 1251 radix_tree_dump(t);
1235 assert(radix_tree_lookup_node(t, 0) == NULL); 1252 assert(radix_tree_lookup_node(t, 0) == NULL);
1236 assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0); 1253 assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
1237 memset(results, 0, sizeof(results)); 1254 memset(results, 0, sizeof(results));
1238 assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 1); 1255 assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 1);
1239 assert(results[0] == (void *)0xdeadbea0); 1256 assert(results[0] == (void *)0xdeadbea0);
1240 assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 0); 1257 assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 0);
1241 memset(results, 0, sizeof(results)); 1258 memset(results, 0, sizeof(results));
1242 assert(radix_tree_gang_lookup_node(t, 1000, results, 3, false) == 1); 1259 assert(radix_tree_gang_lookup_node(t, 1000, results, 3, false) == 1);
1243 assert(results[0] == (void *)0xdeadbea0); 1260 assert(results[0] == (void *)0xdeadbea0);
1244 memset(results, 0, sizeof(results)); 1261 memset(results, 0, sizeof(results));
1245 assert(radix_tree_gang_lookup_node(t, 1000, results, 3, true) == 1); 1262 assert(radix_tree_gang_lookup_node(t, 1000, results, 3, true) == 1);
1246 assert(results[0] == (void *)0xdeadbea0); 1263 assert(results[0] == (void *)0xdeadbea0);
1247 assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, false) 1264 assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, false)
1248 == 0); 1265 == 0);
1249 assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, true) 1266 assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, true)
1250 == 0); 1267 == 0);
1251 memset(results, 0, sizeof(results)); 1268 memset(results, 0, sizeof(results));
1252 assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, false) 1269 assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, false)
1253 == 1); 1270 == 1);
1254 memset(results, 0, sizeof(results)); 1271 memset(results, 0, sizeof(results));
1255 assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, true) 1272 assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, true)
1256 == 1); 1273 == 1);
1257 assert(results[0] == (void *)0xdeadbea0); 1274 assert(results[0] == (void *)0xdeadbea0);
1258 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, false, 1) 1275 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, false, 1)
1259 == 0); 1276 == 0);
1260 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, true, 1) 1277 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, true, 1)
1261 == 0); 1278 == 0);
1262 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3, 1279 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
1263 false, 1) == 0); 1280 false, 1) == 0);
1264 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3, 1281 assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
1265 true, 1) == 0); 1282 true, 1) == 0);
1266 assert(!radix_tree_get_tag(t, 1000, 1)); 1283 assert(!radix_tree_get_tag(t, 1000, 1));
1267 assert(!radix_tree_get_tag(t, 1000, 2)); 1284 assert(!radix_tree_get_tag(t, 1000, 2));
1268 assert(radix_tree_get_tag(t, 1000, 2 | 1) == 0); 1285 assert(radix_tree_get_tag(t, 1000, 2 | 1) == 0);
1269 assert(radix_tree_empty_tagged_tree_p(t, 1)); 1286 assert(radix_tree_empty_tagged_tree_p(t, 1));
1270 assert(radix_tree_empty_tagged_tree_p(t, 2)); 1287 assert(radix_tree_empty_tagged_tree_p(t, 2));
1271 radix_tree_set_tag(t, 1000, 2); 1288 radix_tree_set_tag(t, 1000, 2);
1272 assert(!radix_tree_get_tag(t, 1000, 1)); 1289 assert(!radix_tree_get_tag(t, 1000, 1));
1273 assert(radix_tree_get_tag(t, 1000, 2)); 1290 assert(radix_tree_get_tag(t, 1000, 2));
1274 assert(radix_tree_get_tag(t, 1000, 2 | 1) == 2); 1291 assert(radix_tree_get_tag(t, 1000, 2 | 1) == 2);
1275 assert(radix_tree_empty_tagged_tree_p(t, 1)); 1292 assert(radix_tree_empty_tagged_tree_p(t, 1));
1276 assert(!radix_tree_empty_tagged_tree_p(t, 2)); 1293 assert(!radix_tree_empty_tagged_tree_p(t, 2));
1277 radix_tree_dump(t); 1294 radix_tree_dump(t);
1278 assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0); 1295 assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
1279 assert(radix_tree_insert_node(t, 0, (void *)0xbea0) == 0); 1296 assert(radix_tree_insert_node(t, 0, (void *)0xbea0) == 0);
1280 radix_tree_dump(t); 1297 radix_tree_dump(t);
1281 assert(radix_tree_lookup_node(t, 0) == (void *)0xbea0); 1298 assert(radix_tree_lookup_node(t, 0) == (void *)0xbea0);
1282 assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0); 1299 assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
1283 assert(radix_tree_insert_node(t, UINT64_C(10000000000), (void *)0xdea0) 1300 assert(radix_tree_insert_node(t, UINT64_C(10000000000), (void *)0xdea0)
1284 == 0); 1301 == 0);
1285 radix_tree_dump(t); 1302 radix_tree_dump(t);
1286 assert(radix_tree_lookup_node(t, 0) == (void *)0xbea0); 1303 assert(radix_tree_lookup_node(t, 0) == (void *)0xbea0);
1287 assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0); 1304 assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
1288 assert(radix_tree_lookup_node(t, UINT64_C(10000000000)) == 1305 assert(radix_tree_lookup_node(t, UINT64_C(10000000000)) ==
1289 (void *)0xdea0); 1306 (void *)0xdea0);
1290 radix_tree_dump(t); 1307 radix_tree_dump(t);
1291 assert(!radix_tree_get_tag(t, 0, 2)); 1308 assert(!radix_tree_get_tag(t, 0, 2));
1292 assert(radix_tree_get_tag(t, 1000, 2)); 1309 assert(radix_tree_get_tag(t, 1000, 2));
1293 assert(!radix_tree_get_tag(t, UINT64_C(10000000000), 1)); 1310 assert(!radix_tree_get_tag(t, UINT64_C(10000000000), 1));
1294 radix_tree_set_tag(t, 0, 2); 1311 radix_tree_set_tag(t, 0, 2);
1295 radix_tree_set_tag(t, UINT64_C(10000000000), 2); 1312 radix_tree_set_tag(t, UINT64_C(10000000000), 2);
1296 radix_tree_dump(t); 1313 radix_tree_dump(t);
1297 assert(radix_tree_get_tag(t, 0, 2)); 1314 assert(radix_tree_get_tag(t, 0, 2));
1298 assert(radix_tree_get_tag(t, 1000, 2)); 1315 assert(radix_tree_get_tag(t, 1000, 2));
1299 assert(radix_tree_get_tag(t, UINT64_C(10000000000), 2)); 1316 assert(radix_tree_get_tag(t, UINT64_C(10000000000), 2));
1300 radix_tree_clear_tag(t, 0, 2); 1317 radix_tree_clear_tag(t, 0, 2);
1301 radix_tree_clear_tag(t, UINT64_C(10000000000), 2); 1318 radix_tree_clear_tag(t, UINT64_C(10000000000), 2);
1302 radix_tree_dump(t); 1319 radix_tree_dump(t);
1303 assert(!radix_tree_get_tag(t, 0, 2)); 1320 assert(!radix_tree_get_tag(t, 0, 2));
1304 assert(radix_tree_get_tag(t, 1000, 2)); 1321 assert(radix_tree_get_tag(t, 1000, 2));
1305 assert(!radix_tree_get_tag(t, UINT64_C(10000000000), 2)); 1322 assert(!radix_tree_get_tag(t, UINT64_C(10000000000), 2));
1306 radix_tree_dump(t); 1323 radix_tree_dump(t);
1307 assert(radix_tree_replace_node(t, 1000, (void *)0x12345678) == 1324 assert(radix_tree_replace_node(t, 1000, (void *)0x12345678) ==
1308 (void *)0xdeadbea0); 1325 (void *)0xdeadbea0);
1309 assert(!radix_tree_get_tag(t, 1000, 1)); 1326 assert(!radix_tree_get_tag(t, 1000, 1));
1310 assert(radix_tree_get_tag(t, 1000, 2)); 1327 assert(radix_tree_get_tag(t, 1000, 2));
1311 assert(radix_tree_get_tag(t, 1000, 2 | 1) == 2); 1328 assert(radix_tree_get_tag(t, 1000, 2 | 1) == 2);
1312 memset(results, 0, sizeof(results)); 1329 memset(results, 0, sizeof(results));
1313 assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 3); 1330 assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 3);
1314 assert(results[0] == (void *)0xbea0); 1331 assert(results[0] == (void *)0xbea0);
1315 assert(results[1] == (void *)0x12345678); 1332 assert(results[1] == (void *)0x12345678);
1316 assert(results[2] == (void *)0xdea0); 1333 assert(results[2] == (void *)0xdea0);
1317 memset(results, 0, sizeof(results)); 1334 memset(results, 0, sizeof(results));
1318 assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 1); 1335 assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 1);
1319 assert(results[0] == (void *)0xbea0); 1336 assert(results[0] == (void *)0xbea0);
1320 memset(results, 0, sizeof(results)); 1337 memset(results, 0, sizeof(results));
1321 assert(radix_tree_gang_lookup_node(t, 1, results, 3, false) == 2); 1338 assert(radix_tree_gang_lookup_node(t, 1, results, 3, false) == 2);
1322 assert(results[0] == (void *)0x12345678); 1339 assert(results[0] == (void *)0x12345678);
1323 assert(results[1] == (void *)0xdea0); 1340 assert(results[1] == (void *)0xdea0);
1324 assert(radix_tree_gang_lookup_node(t, 1, results, 3, true) == 0); 1341 assert(radix_tree_gang_lookup_node(t, 1, results, 3, true) == 0);
1325 memset(results, 0, sizeof(results)); 1342 memset(results, 0, sizeof(results));
1326 assert(radix_tree_gang_lookup_node(t, 1001, results, 3, false) == 1); 1343 assert(radix_tree_gang_lookup_node(t, 1001, results, 3, false) == 1);
1327 assert(results[0] == (void *)0xdea0); 1344 assert(results[0] == (void *)0xdea0);
1328 assert(radix_tree_gang_lookup_node(t, 1001, results, 3, true) == 0); 1345 assert(radix_tree_gang_lookup_node(t, 1001, results, 3, true) == 0);
1329 assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000001), results, 3, 1346 assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000001), results, 3,
1330 false) == 0); 1347 false) == 0);
1331 assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000001), results, 3, 1348 assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000001), results, 3,
1332 true) == 0); 1349 true) == 0);
1333 assert(radix_tree_gang_lookup_node(t, UINT64_C(1000000000000), results, 1350 assert(radix_tree_gang_lookup_node(t, UINT64_C(1000000000000), results,
1334 3, false) == 0); 1351 3, false) == 0);
1335 assert(radix_tree_gang_lookup_node(t, UINT64_C(1000000000000), results, 1352 assert(radix_tree_gang_lookup_node(t, UINT64_C(1000000000000), results,
1336 3, true) == 0); 1353 3, true) == 0);
1337 memset(results, 0, sizeof(results)); 1354 memset(results, 0, sizeof(results));
1338 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 100, false, 2) 1355 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 100, false, 2)
1339 == 1); 1356 == 1);
1340 assert(results[0] == (void *)0x12345678); 1357 assert(results[0] == (void *)0x12345678);
1341 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 100, true, 2) 1358 assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 100, true, 2)
1342 == 0); 1359 == 0);
1343 assert(entry_tagmask(t->t_root) != 0); 1360 assert(entry_tagmask(t->t_root) != 0);
1344 assert(radix_tree_remove_node(t, 1000) == (void *)0x12345678); 1361 assert(radix_tree_remove_node(t, 1000) == (void *)0x12345678);
1345 assert(entry_tagmask(t->t_root) == 0); 1362 assert(entry_tagmask(t->t_root) == 0);
1346 radix_tree_dump(t); 1363 radix_tree_dump(t);
1347 assert(radix_tree_insert_node(t, UINT64_C(10000000001), (void *)0xfff0) 1364 assert(radix_tree_insert_node(t, UINT64_C(10000000001), (void *)0xfff0)
1348 == 0); 1365 == 0);
1349 memset(results, 0, sizeof(results)); 1366 memset(results, 0, sizeof(results));
1350 assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000000), results, 3, 1367 assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000000), results, 3,

cvs diff -r1.6 -r1.7 src/sys/sys/radixtree.h (switch to unified diff)

--- src/sys/sys/radixtree.h 2019/12/05 18:32:25 1.6
+++ src/sys/sys/radixtree.h 2020/01/28 16:33:34 1.7
@@ -1,89 +1,90 @@ @@ -1,89 +1,90 @@
1/* $NetBSD: radixtree.h,v 1.6 2019/12/05 18:32:25 ad Exp $ */ 1/* $NetBSD: radixtree.h,v 1.7 2020/01/28 16:33:34 ad Exp $ */
2 2
3/*- 3/*-
4 * Copyright (c)2011 YAMAMOTO Takashi, 4 * Copyright (c)2011 YAMAMOTO Takashi,
5 * All rights reserved. 5 * All rights reserved.
6 * 6 *
7 * Redistribution and use in source and binary forms, with or without 7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions 8 * modification, are permitted provided that the following conditions
9 * are met: 9 * are met:
10 * 1. Redistributions of source code must retain the above copyright 10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer. 11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright 12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the 13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution. 14 * documentation and/or other materials provided with the distribution.
15 * 15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND 16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
17 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 17 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 18 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE 19 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
20 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 20 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 21 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
22 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 22 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
23 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 23 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
24 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 24 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 25 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 * SUCH DAMAGE. 26 * SUCH DAMAGE.
27 */ 27 */
28 28
29#if !defined(_SYS_RADIXTREE_H_) 29#if !defined(_SYS_RADIXTREE_H_)
30#define _SYS_RADIXTREE_H_ 30#define _SYS_RADIXTREE_H_
31 31
32struct radix_tree { 32struct radix_tree {
33 void *t_root; 33 void *t_root;
34 unsigned int t_height; 34 unsigned int t_height;
35}; 35};
36 36
37#if defined(_KERNEL) || defined(_STANDALONE) 37#if defined(_KERNEL) || defined(_STANDALONE)
38#include <sys/types.h> 38#include <sys/types.h>
39#else /* defined(_KERNEL) || defined(_STANDALONE) */ 39#else /* defined(_KERNEL) || defined(_STANDALONE) */
40#include <stdbool.h> 40#include <stdbool.h>
41#include <stdint.h> 41#include <stdint.h>
42#endif /* defined(_KERNEL) || defined(_STANDALONE) */ 42#endif /* defined(_KERNEL) || defined(_STANDALONE) */
43 43
44/* 44/*
45 * subsystem 45 * subsystem
46 */ 46 */
47 47
48#if defined(_KERNEL) 48#if defined(_KERNEL)
49void radix_tree_init(void); 49void radix_tree_init(void);
 50void radix_tree_await_memory(void);
50#endif /* defined(_KERNEL) */ 51#endif /* defined(_KERNEL) */
51 52
52/* 53/*
53 * tree 54 * tree
54 */ 55 */
55 56
56void radix_tree_init_tree(struct radix_tree *); 57void radix_tree_init_tree(struct radix_tree *);
57void radix_tree_fini_tree(struct radix_tree *); 58void radix_tree_fini_tree(struct radix_tree *);
58bool radix_tree_empty_tree_p(struct radix_tree *); 59bool radix_tree_empty_tree_p(struct radix_tree *);
59 60
60/* 61/*
61 * node 62 * node
62 */ 63 */
63 64
64int radix_tree_insert_node(struct radix_tree *, uint64_t, void *); 65int radix_tree_insert_node(struct radix_tree *, uint64_t, void *);
65void *radix_tree_replace_node(struct radix_tree *, uint64_t, void *); 66void *radix_tree_replace_node(struct radix_tree *, uint64_t, void *);
66void *radix_tree_remove_node(struct radix_tree *, uint64_t); 67void *radix_tree_remove_node(struct radix_tree *, uint64_t);
67void *radix_tree_lookup_node(struct radix_tree *, uint64_t); 68void *radix_tree_lookup_node(struct radix_tree *, uint64_t);
68unsigned int radix_tree_gang_lookup_node(struct radix_tree *, uint64_t, 69unsigned int radix_tree_gang_lookup_node(struct radix_tree *, uint64_t,
69 void **, unsigned int, bool); 70 void **, unsigned int, bool);
70unsigned int radix_tree_gang_lookup_node_reverse(struct radix_tree *, uint64_t, 71unsigned int radix_tree_gang_lookup_node_reverse(struct radix_tree *, uint64_t,
71 void **, unsigned int, bool); 72 void **, unsigned int, bool);
72 73
73/* 74/*
74 * tag 75 * tag
75 */ 76 */
76 77
77typedef unsigned int radix_tree_tagmask_t; 78typedef unsigned int radix_tree_tagmask_t;
78#define RADIX_TREE_TAG_ID_MAX 2 79#define RADIX_TREE_TAG_ID_MAX 2
79radix_tree_tagmask_t radix_tree_get_tag(struct radix_tree *, uint64_t, 80radix_tree_tagmask_t radix_tree_get_tag(struct radix_tree *, uint64_t,
80 radix_tree_tagmask_t); 81 radix_tree_tagmask_t);
81void radix_tree_set_tag(struct radix_tree *, uint64_t, radix_tree_tagmask_t); 82void radix_tree_set_tag(struct radix_tree *, uint64_t, radix_tree_tagmask_t);
82void radix_tree_clear_tag(struct radix_tree *, uint64_t, radix_tree_tagmask_t); 83void radix_tree_clear_tag(struct radix_tree *, uint64_t, radix_tree_tagmask_t);
83unsigned int radix_tree_gang_lookup_tagged_node(struct radix_tree *, uint64_t, 84unsigned int radix_tree_gang_lookup_tagged_node(struct radix_tree *, uint64_t,
84 void **, unsigned int, bool, radix_tree_tagmask_t); 85 void **, unsigned int, bool, radix_tree_tagmask_t);
85unsigned int radix_tree_gang_lookup_tagged_node_reverse(struct radix_tree *, 86unsigned int radix_tree_gang_lookup_tagged_node_reverse(struct radix_tree *,
86 uint64_t, void **, unsigned int, bool, radix_tree_tagmask_t); 87 uint64_t, void **, unsigned int, bool, radix_tree_tagmask_t);
87bool radix_tree_empty_tagged_tree_p(struct radix_tree *, radix_tree_tagmask_t); 88bool radix_tree_empty_tagged_tree_p(struct radix_tree *, radix_tree_tagmask_t);
88 89
89#endif /* !defined(_SYS_RADIXTREE_H_) */ 90#endif /* !defined(_SYS_RADIXTREE_H_) */
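
The newly declared radix_tree_await_memory() is for kernel callers only.  A
hypothetical retry loop sketching how it might pair with an insert that
fails for lack of memory; the blocking behaviour is inferred from the name
and the log message rather than stated in this diff, and "t", "idx" and "p"
stand in for a caller's own tree, key and value:

	int error;

	for (;;) {
		error = radix_tree_insert_node(t, idx, p);
		if (error == 0)
			break;
		/* no intermediate nodes available: wait, then retry */
		radix_tree_await_memory();
	}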