1/* $NetBSD: tree.h,v 1.8 2004/03/28 19:38:30 provos Exp $ */
2/* $OpenBSD: tree.h,v 1.7 2002/10/17 21:51:54 art Exp $ */
3
4/*-
5 * SPDX-License-Identifier: BSD-2-Clause
6 *
7 * Copyright 2002 Niels Provos <provos@citi.umich.edu>
8 * All rights reserved.
9 *
10 * Redistribution and use in source and binary forms, with or without
11 * modification, are permitted provided that the following conditions
12 * are met:
13 * 1. Redistributions of source code must retain the above copyright
14 * notice, this list of conditions and the following disclaimer.
15 * 2. Redistributions in binary form must reproduce the above copyright
16 * notice, this list of conditions and the following disclaimer in the
17 * documentation and/or other materials provided with the distribution.
18 *
19 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
20 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
21 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
22 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
23 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
24 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 */
30
31#ifndef _SYS_TREE_H_
32#define _SYS_TREE_H_
33
34#include <sys/cdefs.h>
35
36/*
37 * This file defines data structures for different types of trees:
38 * splay trees and rank-balanced trees.
39 *
40 * A splay tree is a self-organizing data structure. Every operation
41 * on the tree causes a splay to happen. The splay moves the requested
42 * node to the root of the tree and partly rebalances it.
43 *
44 * This has the benefit that request locality causes faster lookups as
45 * the requested nodes move to the top of the tree. On the other hand,
46 * every lookup causes memory writes.
47 *
48 * The Balance Theorem bounds the total access time for m operations
49 * and n inserts on an initially empty tree as O((m + n)lg n). The
50 * amortized cost for a sequence of m accesses to a splay tree is O(lg n);
51 *
52 * A rank-balanced tree is a binary search tree with an integer
53 * rank-difference as an attribute of each pointer from parent to child.
54 * The sum of the rank-differences on any path from a node down to null is
55 * the same, and defines the rank of that node. The rank of the null node
56 * is -1.
57 *
58 * Different additional conditions define different sorts of balanced trees,
59 * including "red-black" and "AVL" trees. The set of conditions applied here
 * are the "weak-AVL" conditions of Haeupler, Sen and Tarjan presented in
 * "Rank Balanced Trees", ACM Transactions on Algorithms, Volume 11, Issue 4,
 * June 2015, Article No. 30, pp. 1-26, https://doi.org/10.1145/2689412 (the
 * HST paper):
63 * - every rank-difference is 1 or 2.
64 * - the rank of any leaf is 1.
65 *
66 * For historical reasons, rank differences that are even are associated
67 * with the color red (Rank-Even-Difference), and the child that a red edge
68 * points to is called a red child.
69 *
70 * Every operation on a rank-balanced tree is bounded as O(lg n).
71 * The maximum height of a rank-balanced tree is 2lg (n+1).
72 */
73
/*
 * Declare a head structure for a splay tree of "struct type" nodes;
 * "name" becomes the structure tag of the head.
 */
#define SPLAY_HEAD(name, type)						\
struct name {								\
	struct type *sph_root; /* root of the tree */			\
}

/* Static initializer for an empty splay tree head. */
#define SPLAY_INITIALIZER(root)						\
	{ NULL }

/* Run-time initializer for an empty splay tree head. */
#define SPLAY_INIT(root) do {						\
	(root)->sph_root = NULL;					\
} while (/*CONSTCOND*/ 0)

/*
 * Linkage embedded in each tree node; holds the left and right child
 * pointers (a splay tree keeps no parent pointer).
 */
#define SPLAY_ENTRY(type)						\
struct {								\
	struct type *spe_left; /* left element */			\
	struct type *spe_right; /* right element */			\
}

/* Accessors for children and root, plus the empty-tree test. */
#define SPLAY_LEFT(elm, field)		(elm)->field.spe_left
#define SPLAY_RIGHT(elm, field)		(elm)->field.spe_right
#define SPLAY_ROOT(head)		(head)->sph_root
#define SPLAY_EMPTY(head)		(SPLAY_ROOT(head) == NULL)
96
/* SPLAY_ROTATE_{LEFT,RIGHT} expect that tmp hold SPLAY_{RIGHT,LEFT} */
/* Rotate the root right: tmp, the root's left child, becomes the root. */
#define SPLAY_ROTATE_RIGHT(head, tmp, field) do {			\
	SPLAY_LEFT((head)->sph_root, field) = SPLAY_RIGHT(tmp, field);	\
	SPLAY_RIGHT(tmp, field) = (head)->sph_root;			\
	(head)->sph_root = tmp;						\
} while (/*CONSTCOND*/ 0)

/* Rotate the root left: tmp, the root's right child, becomes the root. */
#define SPLAY_ROTATE_LEFT(head, tmp, field) do {			\
	SPLAY_RIGHT((head)->sph_root, field) = SPLAY_LEFT(tmp, field);	\
	SPLAY_LEFT(tmp, field) = (head)->sph_root;			\
	(head)->sph_root = tmp;						\
} while (/*CONSTCOND*/ 0)

/*
 * Top-down splay "link" steps: attach the current root to the output
 * tree tracked by tmp (the right output tree for LINKLEFT, the left
 * one for LINKRIGHT), then descend one level left or right.
 */
#define SPLAY_LINKLEFT(head, tmp, field) do {				\
	SPLAY_LEFT(tmp, field) = (head)->sph_root;			\
	tmp = (head)->sph_root;						\
	(head)->sph_root = SPLAY_LEFT((head)->sph_root, field);		\
} while (/*CONSTCOND*/ 0)

#define SPLAY_LINKRIGHT(head, tmp, field) do {				\
	SPLAY_RIGHT(tmp, field) = (head)->sph_root;			\
	tmp = (head)->sph_root;						\
	(head)->sph_root = SPLAY_RIGHT((head)->sph_root, field);	\
} while (/*CONSTCOND*/ 0)

/*
 * Reattach the left and right output trees accumulated during the
 * top-down splay (both hanging from the on-stack "node") around the
 * new root.
 */
#define SPLAY_ASSEMBLE(head, node, left, right, field) do {		\
	SPLAY_RIGHT(left, field) = SPLAY_LEFT((head)->sph_root, field);	\
	SPLAY_LEFT(right, field) = SPLAY_RIGHT((head)->sph_root, field);\
	SPLAY_LEFT((head)->sph_root, field) = SPLAY_RIGHT(node, field);	\
	SPLAY_RIGHT((head)->sph_root, field) = SPLAY_LEFT(node, field);	\
} while (/*CONSTCOND*/ 0)
128
129/* Generates prototypes and inline functions */
130
/*
 * Declare the out-of-line splay functions and define the small inline
 * helpers (FIND, NEXT, MIN_MAX).  The heavy functions are emitted by
 * SPLAY_GENERATE with matching (name, type, field, cmp) arguments.
 */
#define SPLAY_PROTOTYPE(name, type, field, cmp)				\
void name##_SPLAY(struct name *, struct type *);			\
void name##_SPLAY_MINMAX(struct name *, int);				\
struct type *name##_SPLAY_INSERT(struct name *, struct type *);		\
struct type *name##_SPLAY_REMOVE(struct name *, struct type *);		\
									\
/* Finds the node with the same key as elm */				\
static __unused __inline struct type *					\
name##_SPLAY_FIND(struct name *head, struct type *elm)			\
{									\
	if (SPLAY_EMPTY(head))						\
		return(NULL);						\
	name##_SPLAY(head, elm);					\
	if ((cmp)(elm, (head)->sph_root) == 0)				\
		return (head->sph_root);				\
	return (NULL);							\
}									\
									\
/* Splays elm to the root, then returns the minimum of its right	\
 * subtree (the in-order successor), or NULL if there is none. */	\
static __unused __inline struct type *					\
name##_SPLAY_NEXT(struct name *head, struct type *elm)			\
{									\
	name##_SPLAY(head, elm);					\
	if (SPLAY_RIGHT(elm, field) != NULL) {				\
		elm = SPLAY_RIGHT(elm, field);				\
		while (SPLAY_LEFT(elm, field) != NULL) {		\
			elm = SPLAY_LEFT(elm, field);			\
		}							\
	} else								\
		elm = NULL;						\
	return (elm);							\
}									\
									\
/* Splays the minimum (val < 0) or maximum (val > 0) element to the	\
 * root and returns it; see SPLAY_NEGINF/SPLAY_INF. */			\
static __unused __inline struct type *					\
name##_SPLAY_MIN_MAX(struct name *head, int val)			\
{									\
	name##_SPLAY_MINMAX(head, val);					\
	return (SPLAY_ROOT(head));					\
}
169
/* Main splay operation.
 * Moves node close to the key of elm to top.
 * SPLAY_GENERATE emits the out-of-line functions declared by
 * SPLAY_PROTOTYPE: INSERT, REMOVE, the core top-down splay, and
 * SPLAY_MINMAX.
 */
#define SPLAY_GENERATE(name, type, field, cmp)				\
struct type *								\
name##_SPLAY_INSERT(struct name *head, struct type *elm)		\
{									\
	if (SPLAY_EMPTY(head)) {					\
		SPLAY_LEFT(elm, field) = SPLAY_RIGHT(elm, field) = NULL; \
	} else {							\
		__typeof(cmp(NULL, NULL)) __comp;			\
		name##_SPLAY(head, elm);				\
		__comp = (cmp)(elm, (head)->sph_root);			\
		if (__comp < 0) {					\
			SPLAY_LEFT(elm, field) = SPLAY_LEFT((head)->sph_root, field);\
			SPLAY_RIGHT(elm, field) = (head)->sph_root;	\
			SPLAY_LEFT((head)->sph_root, field) = NULL;	\
		} else if (__comp > 0) {				\
			SPLAY_RIGHT(elm, field) = SPLAY_RIGHT((head)->sph_root, field);\
			SPLAY_LEFT(elm, field) = (head)->sph_root;	\
			SPLAY_RIGHT((head)->sph_root, field) = NULL;	\
		} else							\
			return ((head)->sph_root);			\
	}								\
	(head)->sph_root = (elm);					\
	return (NULL);							\
}									\
									\
/* Removes the node matching elm; returns it, or NULL if not found. */	\
struct type *								\
name##_SPLAY_REMOVE(struct name *head, struct type *elm)		\
{									\
	struct type *__tmp;						\
	if (SPLAY_EMPTY(head))						\
		return (NULL);						\
	name##_SPLAY(head, elm);					\
	if ((cmp)(elm, (head)->sph_root) == 0) {			\
		if (SPLAY_LEFT((head)->sph_root, field) == NULL) {	\
			(head)->sph_root = SPLAY_RIGHT((head)->sph_root, field);\
		} else {						\
			__tmp = SPLAY_RIGHT((head)->sph_root, field);	\
			(head)->sph_root = SPLAY_LEFT((head)->sph_root, field);\
			name##_SPLAY(head, elm);			\
			SPLAY_RIGHT((head)->sph_root, field) = __tmp;	\
		}							\
		return (elm);						\
	}								\
	return (NULL);							\
}									\
									\
/* Top-down splay: brings the node closest to elm's key to the root,	\
 * accumulating left/right output trees from the on-stack __node. */	\
void									\
name##_SPLAY(struct name *head, struct type *elm)			\
{									\
	struct type __node, *__left, *__right, *__tmp;			\
	__typeof(cmp(NULL, NULL)) __comp;				\
\
	SPLAY_LEFT(&__node, field) = SPLAY_RIGHT(&__node, field) = NULL;\
	__left = __right = &__node;					\
\
	while ((__comp = (cmp)(elm, (head)->sph_root)) != 0) {		\
		if (__comp < 0) {					\
			__tmp = SPLAY_LEFT((head)->sph_root, field);	\
			if (__tmp == NULL)				\
				break;					\
			if ((cmp)(elm, __tmp) < 0){			\
				SPLAY_ROTATE_RIGHT(head, __tmp, field);	\
				if (SPLAY_LEFT((head)->sph_root, field) == NULL)\
					break;				\
			}						\
			SPLAY_LINKLEFT(head, __right, field);		\
		} else if (__comp > 0) {				\
			__tmp = SPLAY_RIGHT((head)->sph_root, field);	\
			if (__tmp == NULL)				\
				break;					\
			if ((cmp)(elm, __tmp) > 0){			\
				SPLAY_ROTATE_LEFT(head, __tmp, field);	\
				if (SPLAY_RIGHT((head)->sph_root, field) == NULL)\
					break;				\
			}						\
			SPLAY_LINKRIGHT(head, __left, field);		\
		}							\
	}								\
	SPLAY_ASSEMBLE(head, &__node, __left, __right, field);		\
}									\
									\
/* Splay with either the minimum or the maximum element			\
 * Used to find minimum or maximum element in tree.			\
 */									\
void name##_SPLAY_MINMAX(struct name *head, int __comp)			\
{									\
	struct type __node, *__left, *__right, *__tmp;			\
\
	SPLAY_LEFT(&__node, field) = SPLAY_RIGHT(&__node, field) = NULL;\
	__left = __right = &__node;					\
\
	while (1) {							\
		if (__comp < 0) {					\
			__tmp = SPLAY_LEFT((head)->sph_root, field);	\
			if (__tmp == NULL)				\
				break;					\
			if (__comp < 0){				\
				SPLAY_ROTATE_RIGHT(head, __tmp, field);	\
				if (SPLAY_LEFT((head)->sph_root, field) == NULL)\
					break;				\
			}						\
			SPLAY_LINKLEFT(head, __right, field);		\
		} else if (__comp > 0) {				\
			__tmp = SPLAY_RIGHT((head)->sph_root, field);	\
			if (__tmp == NULL)				\
				break;					\
			if (__comp > 0) {				\
				SPLAY_ROTATE_LEFT(head, __tmp, field);	\
				if (SPLAY_RIGHT((head)->sph_root, field) == NULL)\
					break;				\
			}						\
			SPLAY_LINKRIGHT(head, __left, field);		\
		}							\
	}								\
	SPLAY_ASSEMBLE(head, &__node, __left, __right, field);		\
}
289
/* Arguments for name##_SPLAY_MIN_MAX: splay the minimum or maximum. */
#define SPLAY_NEGINF	-1
#define SPLAY_INF	1

/* Convenience wrappers that expand to the generated functions. */
#define SPLAY_INSERT(name, x, y)	name##_SPLAY_INSERT(x, y)
#define SPLAY_REMOVE(name, x, y)	name##_SPLAY_REMOVE(x, y)
#define SPLAY_FIND(name, x, y)		name##_SPLAY_FIND(x, y)
#define SPLAY_NEXT(name, x, y)		name##_SPLAY_NEXT(x, y)
#define SPLAY_MIN(name, x)		(SPLAY_EMPTY(x) ? NULL	\
					: name##_SPLAY_MIN_MAX(x, SPLAY_NEGINF))
#define SPLAY_MAX(name, x)		(SPLAY_EMPTY(x) ? NULL	\
					: name##_SPLAY_MIN_MAX(x, SPLAY_INF))

/* In-order traversal; note that every step splays, so it writes to the tree. */
#define SPLAY_FOREACH(x, name, head)					\
	for ((x) = SPLAY_MIN(name, head);				\
	     (x) != NULL;						\
	     (x) = SPLAY_NEXT(name, head, x))
306
/* Macros that define a rank-balanced tree */
#define RB_HEAD(name, type)						\
struct name {								\
	struct type *rbh_root; /* root of the tree */			\
}

/* Static initializer for an empty rank-balanced tree head. */
#define RB_INITIALIZER(root)						\
	{ NULL }

/* Run-time initializer for an empty rank-balanced tree head. */
#define RB_INIT(root) do {						\
	(root)->rbh_root = NULL;					\
} while (/*CONSTCOND*/ 0)

/*
 * Per-node linkage: rbe_link[0] is the parent pointer (its two low
 * bits carry the "red" marks), rbe_link[1] the left child and
 * rbe_link[2] the right child; see _RB_UP/_RB_L/_RB_R below.
 */
#define RB_ENTRY(type)							\
struct {								\
	struct type *rbe_link[3];					\
}
324
/*
 * With the expectation that any object of struct type has an
 * address that is a multiple of 4, and that therefore the
 * 2 least significant bits of a pointer to struct type are
 * always zero, this implementation sets those bits to indicate
 * that the left or right child of the tree node is "red".
 */
#define _RB_LINK(elm, dir, field)	(elm)->field.rbe_link[dir]
#define _RB_UP(elm, field)		_RB_LINK(elm, 0, field)
/* _RB_L/_RB_R serve double duty: bit masks in the parent pointer and
 * indices (1, 2) into rbe_link[] for the left/right child. */
#define _RB_L				((__uintptr_t)1)
#define _RB_R				((__uintptr_t)2)
#define _RB_LR				((__uintptr_t)3)
/* View a pointer lvalue as an integer so its low bits can be tested/set. */
#define _RB_BITS(elm)			(*(__uintptr_t *)&elm)
#define _RB_BITSUP(elm, field)		_RB_BITS(_RB_UP(elm, field))
/* Strip the color bits to recover the real pointer value. */
#define _RB_PTR(elm)			(__typeof(elm))			\
					((__uintptr_t)elm & ~_RB_LR)

#define RB_PARENT(elm, field)		_RB_PTR(_RB_UP(elm, field))
#define RB_LEFT(elm, field)		_RB_LINK(elm, _RB_L, field)
#define RB_RIGHT(elm, field)		_RB_LINK(elm, _RB_R, field)
#define RB_ROOT(head)			(head)->rbh_root
#define RB_EMPTY(head)			(RB_ROOT(head) == NULL)

/* Replace dst's parent pointer with src while preserving dst's color bits. */
#define RB_SET_PARENT(dst, src, field) do {				\
	_RB_BITSUP(dst, field) = (__uintptr_t)src |			\
	    (_RB_BITSUP(dst, field) & _RB_LR);				\
} while (/*CONSTCOND*/ 0)

/* Initialize elm as a childless node under parent, with no color bits set. */
#define RB_SET(elm, parent, field) do {					\
	_RB_UP(elm, field) = parent;					\
	RB_LEFT(elm, field) = RB_RIGHT(elm, field) = NULL;		\
} while (/*CONSTCOND*/ 0)
357
/*
 * Either RB_AUGMENT or RB_AUGMENT_CHECK is invoked in a loop at the root of
 * every modified subtree, from the bottom up to the root, to update augmented
 * node data.  RB_AUGMENT_CHECK returns true only when the update changes the
 * node data, so that updating can be stopped short of the root when it returns
 * false.
 */
#ifndef RB_AUGMENT_CHECK
#ifndef RB_AUGMENT
#define RB_AUGMENT_CHECK(x) 0
#else
/* Wrap a plain RB_AUGMENT so the walk never stops early (always "changed"). */
#define RB_AUGMENT_CHECK(x) (RB_AUGMENT(x), 1)
#endif
#endif

/*
 * Re-run augmentation from elm up toward the root, stopping as soon as
 * an update leaves a node's data unchanged.
 */
#define RB_UPDATE_AUGMENT(elm, field) do {				\
	__typeof(elm) rb_update_tmp = (elm);				\
	while (RB_AUGMENT_CHECK(rb_update_tmp) &&			\
	    (rb_update_tmp = RB_PARENT(rb_update_tmp, field)) != NULL)	\
		;							\
} while (0)
379
/* In 'par' (or at the root, when par is NULL), replace child 'out' by 'in'. */
#define RB_SWAP_CHILD(head, par, out, in, field) do {			\
	if (par == NULL)						\
		RB_ROOT(head) = (in);					\
	else if ((out) == RB_LEFT(par, field))				\
		RB_LEFT(par, field) = (in);				\
	else								\
		RB_RIGHT(par, field) = (in);				\
} while (/*CONSTCOND*/ 0)

/*
 * RB_ROTATE macro partially restructures the tree to improve balance. In the
 * case when dir is _RB_L, tmp is a right child of elm. After rotation, elm
 * is a left child of tmp, and the subtree that represented the items between
 * them, which formerly hung to the left of tmp now hangs to the right of elm.
 * The parent-child relationship between elm and its former parent is not
 * changed; where this macro once updated those fields, that is now left to the
 * caller of RB_ROTATE to clean up, so that a pair of rotations does not twice
 * update the same pair of pointer fields with distinct values.
 */
#define RB_ROTATE(elm, tmp, dir, field) do {				\
	if ((_RB_LINK(elm, dir ^ _RB_LR, field) =			\
	    _RB_LINK(tmp, dir, field)) != NULL)				\
		RB_SET_PARENT(_RB_LINK(tmp, dir, field), elm, field);	\
	_RB_LINK(tmp, dir, field) = (elm);				\
	RB_SET_PARENT(elm, tmp, field);					\
} while (/*CONSTCOND*/ 0)
406
/* Generates prototypes and inline functions */
/* 'attr' is prepended to every declaration (e.g. "__unused static"). */
#define RB_PROTOTYPE(name, type, field, cmp)				\
	RB_PROTOTYPE_INTERNAL(name, type, field, cmp,)
#define RB_PROTOTYPE_STATIC(name, type, field, cmp)			\
	RB_PROTOTYPE_INTERNAL(name, type, field, cmp, __unused static)
#define RB_PROTOTYPE_INTERNAL(name, type, field, cmp, attr)		\
	RB_PROTOTYPE_RANK(name, type, attr)				\
	RB_PROTOTYPE_INSERT_COLOR(name, type, attr);			\
	RB_PROTOTYPE_REMOVE_COLOR(name, type, attr);			\
	RB_PROTOTYPE_INSERT_FINISH(name, type, attr);			\
	RB_PROTOTYPE_INSERT(name, type, attr);				\
	RB_PROTOTYPE_REMOVE(name, type, attr);				\
	RB_PROTOTYPE_FIND(name, type, attr);				\
	RB_PROTOTYPE_NFIND(name, type, attr);				\
	RB_PROTOTYPE_NEXT(name, type, attr);				\
	RB_PROTOTYPE_INSERT_NEXT(name, type, attr);			\
	RB_PROTOTYPE_PREV(name, type, attr);				\
	RB_PROTOTYPE_INSERT_PREV(name, type, attr);			\
	RB_PROTOTYPE_MINMAX(name, type, attr);				\
	RB_PROTOTYPE_REINSERT(name, type, attr);
/* name##_RB_RANK exists only in diagnostic (_RB_DIAGNOSTIC) builds. */
#ifdef _RB_DIAGNOSTIC
#define RB_PROTOTYPE_RANK(name, type, attr)				\
	attr int name##_RB_RANK(struct type *);
#else
#define RB_PROTOTYPE_RANK(name, type, attr)
#endif
#define RB_PROTOTYPE_INSERT_COLOR(name, type, attr)			\
	attr struct type *name##_RB_INSERT_COLOR(struct name *,		\
	    struct type *, struct type *)
#define RB_PROTOTYPE_REMOVE_COLOR(name, type, attr)			\
	attr struct type *name##_RB_REMOVE_COLOR(struct name *,		\
	    struct type *, struct type *)
#define RB_PROTOTYPE_REMOVE(name, type, attr)				\
	attr struct type *name##_RB_REMOVE(struct name *, struct type *)
#define RB_PROTOTYPE_INSERT_FINISH(name, type, attr)			\
	attr struct type *name##_RB_INSERT_FINISH(struct name *,	\
	    struct type *, struct type **, struct type *)
#define RB_PROTOTYPE_INSERT(name, type, attr)				\
	attr struct type *name##_RB_INSERT(struct name *, struct type *)
#define RB_PROTOTYPE_FIND(name, type, attr)				\
	attr struct type *name##_RB_FIND(struct name *, struct type *)
#define RB_PROTOTYPE_NFIND(name, type, attr)				\
	attr struct type *name##_RB_NFIND(struct name *, struct type *)
#define RB_PROTOTYPE_NEXT(name, type, attr)				\
	attr struct type *name##_RB_NEXT(struct type *)
#define RB_PROTOTYPE_INSERT_NEXT(name, type, attr)			\
	attr struct type *name##_RB_INSERT_NEXT(struct name *,		\
	    struct type *, struct type *)
#define RB_PROTOTYPE_PREV(name, type, attr)				\
	attr struct type *name##_RB_PREV(struct type *)
#define RB_PROTOTYPE_INSERT_PREV(name, type, attr)			\
	attr struct type *name##_RB_INSERT_PREV(struct name *,		\
	    struct type *, struct type *)
#define RB_PROTOTYPE_MINMAX(name, type, attr)				\
	attr struct type *name##_RB_MINMAX(struct name *, int)
#define RB_PROTOTYPE_REINSERT(name, type, attr)				\
	attr struct type *name##_RB_REINSERT(struct name *, struct type *)
464
/*
 * Generate all the rank-balanced tree functions for type "type",
 * head "name", entry field "field" and three-way comparator "cmp".
 * The _STATIC variant emits them with static linkage.
 */
#define RB_GENERATE(name, type, field, cmp)				\
	RB_GENERATE_INTERNAL(name, type, field, cmp,)
#define RB_GENERATE_STATIC(name, type, field, cmp)			\
	RB_GENERATE_INTERNAL(name, type, field, cmp, __unused static)
#define RB_GENERATE_INTERNAL(name, type, field, cmp, attr)		\
	RB_GENERATE_RANK(name, type, field, attr)			\
	RB_GENERATE_INSERT_COLOR(name, type, field, attr)		\
	RB_GENERATE_REMOVE_COLOR(name, type, field, attr)		\
	RB_GENERATE_INSERT_FINISH(name, type, field, attr)		\
	RB_GENERATE_INSERT(name, type, field, cmp, attr)		\
	RB_GENERATE_REMOVE(name, type, field, attr)			\
	RB_GENERATE_FIND(name, type, field, cmp, attr)			\
	RB_GENERATE_NFIND(name, type, field, cmp, attr)			\
	RB_GENERATE_NEXT(name, type, field, attr)			\
	RB_GENERATE_INSERT_NEXT(name, type, field, cmp, attr)		\
	RB_GENERATE_PREV(name, type, field, attr)			\
	RB_GENERATE_INSERT_PREV(name, type, field, cmp, attr)		\
	RB_GENERATE_MINMAX(name, type, field, attr)			\
	RB_GENERATE_REINSERT(name, type, field, cmp, attr)
487
#ifdef _RB_DIAGNOSTIC
/* Verification may call RB_AUGMENT_CHECK only when it has no side
 * effects, i.e. when RB_AUGMENT is not defined. */
#ifndef RB_AUGMENT
#define _RB_AUGMENT_VERIFY(x) RB_AUGMENT_CHECK(x)
#else
#define _RB_AUGMENT_VERIFY(x) 0
#endif
#define RB_GENERATE_RANK(name, type, field, attr)			\
/*									\
 * Return the rank of the subtree rooted at elm, or -1 if the subtree	\
 * is not rank-balanced, or has inconsistent augmentation data.		\
 */									\
attr int								\
name##_RB_RANK(struct type *elm)					\
{									\
	struct type *left, *right, *up;					\
	int left_rank, right_rank;					\
									\
	if (elm == NULL)						\
		return (0);						\
	up = _RB_UP(elm, field);					\
	left = RB_LEFT(elm, field);					\
	left_rank = ((_RB_BITS(up) & _RB_L) ? 2 : 1) +			\
	    name##_RB_RANK(left);					\
	right = RB_RIGHT(elm, field);					\
	right_rank = ((_RB_BITS(up) & _RB_R) ? 2 : 1) +			\
	    name##_RB_RANK(right);					\
	if (left_rank != right_rank ||					\
	    (left_rank == 2 && left == NULL && right == NULL) ||	\
	    _RB_AUGMENT_VERIFY(elm))					\
		return (-1);						\
	return (left_rank);						\
}
#else
#define RB_GENERATE_RANK(name, type, field, attr)
#endif
523
/*
 * Rebalance after insertion: walk up from the new leaf 'elm' under
 * 'parent', restoring the weak-AVL rank rules.  Returns the root of a
 * rotated subtree for re-augmentation, or NULL when no rotation was
 * needed.
 */
#define RB_GENERATE_INSERT_COLOR(name, type, field, attr)		\
attr struct type *							\
name##_RB_INSERT_COLOR(struct name *head,				\
    struct type *parent, struct type *elm)				\
{									\
	/*								\
	 * Initially, elm is a leaf.  Either its parent was previously	\
	 * a leaf, with two black null children, or an interior node	\
	 * with a black non-null child and a red null child. The	\
	 * balance criterion "the rank of any leaf is 1" precludes the	\
	 * possibility of two red null children for the initial parent.	\
	 * So the first loop iteration cannot lead to accessing an	\
	 * uninitialized 'child', and a later iteration can only happen	\
	 * when a value has been assigned to 'child' in the previous	\
	 * one.								\
	 */								\
	struct type *child, *child_up, *gpar;				\
	__uintptr_t elmdir, sibdir;					\
									\
	do {								\
		/* the rank of the tree rooted at elm grew */		\
		gpar = _RB_UP(parent, field);				\
		elmdir = RB_RIGHT(parent, field) == elm ? _RB_R : _RB_L; \
		if (_RB_BITS(gpar) & elmdir) {				\
			/* shorten the parent-elm edge to rebalance */	\
			_RB_BITSUP(parent, field) ^= elmdir;		\
			return (NULL);					\
		}							\
		sibdir = elmdir ^ _RB_LR;				\
		/* the other edge must change length */			\
		_RB_BITSUP(parent, field) ^= sibdir;			\
		if ((_RB_BITS(gpar) & _RB_LR) == 0) {			\
			/* both edges now short, retry from parent */	\
			child = elm;					\
			elm = parent;					\
			continue;					\
		}							\
		_RB_UP(parent, field) = gpar = _RB_PTR(gpar);		\
		if (_RB_BITSUP(elm, field) & elmdir) {			\
			/*						\
			 * Exactly one of the edges descending from elm \
			 * is long. The long one is in the same		\
			 * direction as the edge from parent to elm,	\
			 * so change that by rotation.  The edge from	\
			 * parent to z was shortened above.  Shorten	\
			 * the long edge down from elm, and adjust	\
			 * other edge lengths based on the downward	\
			 * edges from 'child'.				\
			 *						\
			 *	     par		 par		\
			 *	    /	\		/   \		\
			 *	  elm	 z	       /     z		\
			 *	 /  \		     child		\
			 *	/  child	     /	 \		\
			 *     /   /  \		   elm	  \		\
			 *    w	  /    \	  /   \	   y		\
			 *	 x	y	 w     \		\
			 *				x		\
			 */						\
			RB_ROTATE(elm, child, elmdir, field);		\
			child_up = _RB_UP(child, field);		\
			if (_RB_BITS(child_up) & sibdir)		\
				_RB_BITSUP(parent, field) ^= elmdir;	\
			if (_RB_BITS(child_up) & elmdir)		\
				_RB_BITSUP(elm, field) ^= _RB_LR;	\
			else						\
				_RB_BITSUP(elm, field) ^= elmdir;	\
			/* if child is a leaf, don't augment elm,	\
			 * since it is restored to be a leaf again. */	\
			if ((_RB_BITS(child_up) & _RB_LR) == 0)		\
				elm = child;				\
		} else							\
			child = elm;					\
									\
		/*							\
		 * The long edge descending from 'child' points back	\
		 * in the direction of 'parent'. Rotate to make		\
		 * 'parent' a child of 'child', then make both edges	\
		 * of 'child' short to rebalance.			\
		 *							\
		 *	     par		 child			\
		 *	    /	\		/     \			\
		 *	   /	 z	       x       par		\
		 *	child			      /	  \		\
		 *	 /  \			     /	   z		\
		 *	x    \			    y			\
		 *	      y						\
		 */							\
		RB_ROTATE(parent, child, sibdir, field);		\
		_RB_UP(child, field) = gpar;				\
		RB_SWAP_CHILD(head, gpar, parent, child, field);	\
		/*							\
		 * Elements rotated down have new, smaller subtrees,	\
		 * so update augmentation for them.			\
		 */							\
		if (elm != child)					\
			(void)RB_AUGMENT_CHECK(elm);			\
		(void)RB_AUGMENT_CHECK(parent);				\
		return (child);						\
	} while ((parent = gpar) != NULL);				\
	return (NULL);							\
}
626
627#ifndef RB_STRICT_HST
628/*
629 * In REMOVE_COLOR, the HST paper, in figure 3, in the single-rotate case, has
630 * 'parent' with one higher rank, and then reduces its rank if 'parent' has
631 * become a leaf. This implementation always has the parent in its new position
632 * with lower rank, to avoid the leaf check. Define RB_STRICT_HST to 1 to get
633 * the behavior that HST describes.
634 */
635#define RB_STRICT_HST 0
636#endif
637
/*
 * Rebalance after removal: walk up from 'parent', whose child 'elm'
 * (possibly NULL) roots a subtree whose rank has shrunk.  Returns the
 * root of a rotated subtree for re-augmentation, or NULL when no
 * rotation was needed.
 */
#define RB_GENERATE_REMOVE_COLOR(name, type, field, attr)		\
attr struct type *							\
name##_RB_REMOVE_COLOR(struct name *head,				\
    struct type *parent, struct type *elm)				\
{									\
	struct type *gpar, *sib, *up;					\
	__uintptr_t elmdir, sibdir;					\
									\
	if (RB_RIGHT(parent, field) == elm &&				\
	    RB_LEFT(parent, field) == elm) {				\
		/* Deleting a leaf that is an only-child creates a	\
		 * rank-2 leaf. Demote that leaf. */			\
		_RB_UP(parent, field) = _RB_PTR(_RB_UP(parent, field));	\
		elm = parent;						\
		if ((parent = _RB_UP(elm, field)) == NULL)		\
			return (NULL);					\
	}								\
	do {								\
		/* the rank of the tree rooted at elm shrank */		\
		gpar = _RB_UP(parent, field);				\
		elmdir = RB_RIGHT(parent, field) == elm ? _RB_R : _RB_L; \
		_RB_BITS(gpar) ^= elmdir;				\
		if (_RB_BITS(gpar) & elmdir) {				\
			/* lengthen the parent-elm edge to rebalance */	\
			_RB_UP(parent, field) = gpar;			\
			return (NULL);					\
		}							\
		if (_RB_BITS(gpar) & _RB_LR) {				\
			/* shorten other edge, retry from parent */	\
			_RB_BITS(gpar) ^= _RB_LR;			\
			_RB_UP(parent, field) = gpar;			\
			gpar = _RB_PTR(gpar);				\
			continue;					\
		}							\
		sibdir = elmdir ^ _RB_LR;				\
		sib = _RB_LINK(parent, sibdir, field);			\
		up = _RB_UP(sib, field);				\
		_RB_BITS(up) ^= _RB_LR;					\
		if ((_RB_BITS(up) & _RB_LR) == 0) {			\
			/* shorten edges descending from sib, retry */	\
			_RB_UP(sib, field) = up;			\
			continue;					\
		}							\
		if ((_RB_BITS(up) & sibdir) == 0) {			\
			/*						\
			 * The edge descending from 'sib' away from	\
			 * 'parent' is long.  The short edge descending	\
			 * from 'sib' toward 'parent' points to 'elm*'	\
			 * Rotate to make 'sib' a child of 'elm*'	\
			 * then adjust the lengths of the edges		\
			 * descending from 'sib' and 'elm*'.		\
			 *						\
			 *	     par		 par		\
			 *	    /	\		/   \		\
			 *	   /	sib	      elm    \		\
			 *	  /	/ \		    elm*	\
			 *	elm   elm* \		   /   \	\
			 *	      / \   \		  /	\	\
			 *	     /	 \   z		 /	 \	\
			 *	    x	  y		x	sib	\
			 *				       /   \	\
			 *				      /     z	\
			 *				     y		\
			 */						\
			elm = _RB_LINK(sib, elmdir, field);		\
			/* elm is a 1-child.  First rotate at elm. */	\
			RB_ROTATE(sib, elm, sibdir, field);		\
			up = _RB_UP(elm, field);			\
			_RB_BITSUP(parent, field) ^=			\
			    (_RB_BITS(up) & elmdir) ? _RB_LR : elmdir;	\
			_RB_BITSUP(sib, field) ^=			\
			    (_RB_BITS(up) & sibdir) ? _RB_LR : sibdir;	\
			_RB_BITSUP(elm, field) |= _RB_LR;		\
		} else {						\
			if ((_RB_BITS(up) & elmdir) == 0 &&		\
			    RB_STRICT_HST && elm != NULL) {		\
				/* if parent does not become a leaf,	\
				   do not demote parent yet. */		\
				_RB_BITSUP(parent, field) ^= sibdir;	\
				_RB_BITSUP(sib, field) ^= _RB_LR;	\
			} else if ((_RB_BITS(up) & elmdir) == 0) {	\
				/* demote parent. */			\
				_RB_BITSUP(parent, field) ^= elmdir;	\
				_RB_BITSUP(sib, field) ^= sibdir;	\
			} else						\
				_RB_BITSUP(sib, field) ^= sibdir;	\
			elm = sib;					\
		}							\
									\
		/*							\
		 * The edge descending from 'elm' away from 'parent'	\
		 * is short.  Rotate to make 'parent' a child of 'elm',	\
		 * then lengthen the short edges descending from	\
		 * 'parent' and 'elm' to rebalance.			\
		 *							\
		 *	     par		 elm			\
		 *	    /	\		/   \			\
		 *	   e	 \	       /     \			\
		 *		 elm	      /	      \			\
		 *		 /  \	    par	       s		\
		 *		/    \	   /   \			\
		 *	       /      \	  e	\			\
		 *	      x	       s	 x			\
		 */							\
		RB_ROTATE(parent, elm, elmdir, field);			\
		RB_SET_PARENT(elm, gpar, field);			\
		RB_SWAP_CHILD(head, gpar, parent, elm, field);		\
		/*							\
		 * An element rotated down, but not into the search	\
		 * path has a new, smaller subtree, so update		\
		 * augmentation for it.					\
		 */							\
		if (sib != elm)						\
			(void)RB_AUGMENT_CHECK(sib);			\
		return (parent);					\
	} while (elm = parent, (parent = gpar) != NULL);		\
	return (NULL);							\
}
756
/*
 * Walk from elm toward the root, updating augmentation data while it
 * keeps changing; clear 'match' if the walk visits it.
 */
#define _RB_AUGMENT_WALK(elm, match, field)				\
do {									\
	if (match == elm)						\
		match = NULL;						\
} while (RB_AUGMENT_CHECK(elm) &&					\
    (elm = RB_PARENT(elm, field)) != NULL)
763
#define RB_GENERATE_REMOVE(name, type, field, attr)			\
/* Unlinks 'out' from the tree (replacing it by its in-order		\
 * successor 'in' when it has two children), rebalances and		\
 * re-augments; returns 'out'. */					\
attr struct type *							\
name##_RB_REMOVE(struct name *head, struct type *out)			\
{									\
	struct type *child, *in, *opar, *parent;			\
									\
	child = RB_LEFT(out, field);					\
	in = RB_RIGHT(out, field);					\
	opar = _RB_UP(out, field);					\
	if (in == NULL || child == NULL) {				\
		in = child = (in == NULL ? child : in);			\
		parent = opar = _RB_PTR(opar);				\
	} else {							\
		parent = in;						\
		while (RB_LEFT(in, field))				\
			in = RB_LEFT(in, field);			\
		RB_SET_PARENT(child, in, field);			\
		RB_LEFT(in, field) = child;				\
		child = RB_RIGHT(in, field);				\
		if (parent != in) {					\
			RB_SET_PARENT(parent, in, field);		\
			RB_RIGHT(in, field) = parent;			\
			parent = RB_PARENT(in, field);			\
			RB_LEFT(parent, field) = child;			\
		}							\
		_RB_UP(in, field) = opar;				\
		opar = _RB_PTR(opar);					\
	}								\
	RB_SWAP_CHILD(head, opar, out, in, field);			\
	if (child != NULL)						\
		_RB_UP(child, field) = parent;				\
	if (parent != NULL) {						\
		opar = name##_RB_REMOVE_COLOR(head, parent, child);	\
		/* if rotation has made 'parent' the root of the same	\
		 * subtree as before, don't re-augment it. */		\
		if (parent == in && RB_LEFT(parent, field) == NULL) {	\
			opar = NULL;					\
			parent = RB_PARENT(parent, field);		\
		}							\
		_RB_AUGMENT_WALK(parent, opar, field);			\
		if (opar != NULL) {					\
			/*						\
			 * Elements rotated into the search path have	\
			 * changed subtrees, so update augmentation for	\
			 * them if AUGMENT_WALK didn't.			\
			 */						\
			(void)RB_AUGMENT_CHECK(opar);			\
			(void)RB_AUGMENT_CHECK(RB_PARENT(opar, field));	\
		}							\
	}								\
	return (out);							\
}
816
#define RB_GENERATE_INSERT_FINISH(name, type, field, attr)		\
/* Inserts a node into the RB tree */					\
/* Links 'elm' at the slot *pptr below 'parent', then rebalances	\
 * and re-augments; always returns NULL. */				\
attr struct type *							\
name##_RB_INSERT_FINISH(struct name *head, struct type *parent,		\
    struct type **pptr, struct type *elm)				\
{									\
	struct type *tmp = NULL;					\
									\
	RB_SET(elm, parent, field);					\
	*pptr = elm;							\
	if (parent != NULL)						\
		tmp = name##_RB_INSERT_COLOR(head, parent, elm);	\
	_RB_AUGMENT_WALK(elm, tmp, field);				\
	if (tmp != NULL)						\
		/*							\
		 * An element rotated into the search path has a	\
		 * changed subtree, so update augmentation for it if	\
		 * AUGMENT_WALK didn't.					\
		 */							\
		(void)RB_AUGMENT_CHECK(tmp);				\
	return (NULL);							\
}
839
#define RB_GENERATE_INSERT(name, type, field, cmp, attr)		\
/* Inserts a node into the RB tree */					\
/* Returns an existing element with an equal key, or NULL on success. */ \
attr struct type *							\
name##_RB_INSERT(struct name *head, struct type *elm)			\
{									\
	struct type *tmp;						\
	struct type **tmpp = &RB_ROOT(head);				\
	struct type *parent = NULL;					\
									\
	while ((tmp = *tmpp) != NULL) {					\
		parent = tmp;						\
		__typeof(cmp(NULL, NULL)) comp = (cmp)(elm, parent);	\
		if (comp < 0)						\
			tmpp = &RB_LEFT(parent, field);			\
		else if (comp > 0)					\
			tmpp = &RB_RIGHT(parent, field);		\
		else							\
			return (parent);				\
	}								\
	return (name##_RB_INSERT_FINISH(head, parent, tmpp, elm));	\
}
861
#define RB_GENERATE_FIND(name, type, field, cmp, attr)			\
/* Finds the node with the same key as elm */				\
/* Returns NULL when no node compares equal. */				\
attr struct type *							\
name##_RB_FIND(struct name *head, struct type *elm)			\
{									\
	struct type *tmp = RB_ROOT(head);				\
	__typeof(cmp(NULL, NULL)) comp;					\
	while (tmp) {							\
		comp = cmp(elm, tmp);					\
		if (comp < 0)						\
			tmp = RB_LEFT(tmp, field);			\
		else if (comp > 0)					\
			tmp = RB_RIGHT(tmp, field);			\
		else							\
			return (tmp);					\
	}								\
	return (NULL);							\
}
880
#define RB_GENERATE_NFIND(name, type, field, cmp, attr)			\
/* Finds the first node greater than or equal to the search key */	\
/* Returns NULL when every node is less than the search key. */		\
attr struct type *							\
name##_RB_NFIND(struct name *head, struct type *elm)			\
{									\
	struct type *tmp = RB_ROOT(head);				\
	struct type *res = NULL;					\
	__typeof(cmp(NULL, NULL)) comp;					\
	while (tmp) {							\
		comp = cmp(elm, tmp);					\
		if (comp < 0) {						\
			res = tmp;	/* best candidate so far */	\
			tmp = RB_LEFT(tmp, field);			\
		}							\
		else if (comp > 0)					\
			tmp = RB_RIGHT(tmp, field);			\
		else							\
			return (tmp);					\
	}								\
	return (res);							\
}
902
#define RB_GENERATE_NEXT(name, type, field, attr)			\
/* ARGSUSED */								\
/* Returns the in-order successor of elm, or NULL if elm is the		\
 * rightmost node. */							\
attr struct type *							\
name##_RB_NEXT(struct type *elm)					\
{									\
	if (RB_RIGHT(elm, field)) {					\
		elm = RB_RIGHT(elm, field);				\
		while (RB_LEFT(elm, field))				\
			elm = RB_LEFT(elm, field);			\
	} else {							\
		while (RB_PARENT(elm, field) &&				\
		    (elm == RB_RIGHT(RB_PARENT(elm, field), field)))	\
			elm = RB_PARENT(elm, field);			\
		elm = RB_PARENT(elm, field);				\
	}								\
	return (elm);							\
}
920
/*
 * Sanity check used by the positional insert generators below: in a
 * diagnostic kernel, assert that lo sorts strictly before hi under cmp;
 * in all other builds the check expands to a no-op statement.
 */
#if defined(_KERNEL) && defined(DIAGNOSTIC)
#define _RB_ORDER_CHECK(cmp, lo, hi) do { \
	KASSERT((cmp)(lo, hi) < 0, ("out of order insertion")); \
} while (0)
#else
#define _RB_ORDER_CHECK(cmp, lo, hi) do {} while (0)
#endif
928
#define RB_GENERATE_INSERT_NEXT(name, type, field, cmp, attr)		\
/* Insert next immediately after elm; elm must sort before next. */	\
attr struct type *							\
name##_RB_INSERT_NEXT(struct name *head,				\
    struct type *elm, struct type *next)				\
{									\
	struct type *node;						\
	struct type **linkp;						\
									\
	_RB_ORDER_CHECK(cmp, elm, next);				\
	if (name##_RB_NEXT(elm) != NULL)				\
		_RB_ORDER_CHECK(cmp, next, name##_RB_NEXT(elm));	\
	/* Attach at the leftmost empty slot of elm's right subtree. */	\
	linkp = &RB_RIGHT(elm, field);					\
	while ((node = *linkp) != NULL) {				\
		elm = node;						\
		linkp = &RB_LEFT(elm, field);				\
	}								\
	return (name##_RB_INSERT_FINISH(head, elm, linkp, next));	\
}
947
#define RB_GENERATE_PREV(name, type, field, attr)			\
/* Return the in-order predecessor of elm, or NULL at the minimum. */	\
/* ARGSUSED */								\
attr struct type *							\
name##_RB_PREV(struct type *elm)					\
{									\
	struct type *up;						\
									\
	if (RB_LEFT(elm, field) != NULL) {				\
		/* Predecessor is the maximum of the left subtree. */	\
		for (elm = RB_LEFT(elm, field);				\
		    RB_RIGHT(elm, field) != NULL; )			\
			elm = RB_RIGHT(elm, field);			\
		return (elm);						\
	}								\
	/* Climb while elm is a left child; at the root up is NULL. */	\
	up = RB_PARENT(elm, field);					\
	while (up != NULL && elm == RB_LEFT(up, field)) {		\
		elm = up;						\
		up = RB_PARENT(up, field);				\
	}								\
	return (up);							\
}
965
#define RB_GENERATE_INSERT_PREV(name, type, field, cmp, attr)		\
/* Insert prev immediately before elm; prev must sort before elm. */	\
attr struct type *							\
name##_RB_INSERT_PREV(struct name *head,				\
    struct type *elm, struct type *prev)				\
{									\
	struct type *node;						\
	struct type **linkp;						\
									\
	_RB_ORDER_CHECK(cmp, prev, elm);				\
	if (name##_RB_PREV(elm) != NULL)				\
		_RB_ORDER_CHECK(cmp, name##_RB_PREV(elm), prev);	\
	/* Attach at the rightmost empty slot of elm's left subtree. */	\
	linkp = &RB_LEFT(elm, field);					\
	while ((node = *linkp) != NULL) {				\
		elm = node;						\
		linkp = &RB_RIGHT(elm, field);				\
	}								\
	return (name##_RB_INSERT_FINISH(head, elm, linkp, prev));	\
}
984
#define RB_GENERATE_MINMAX(name, type, field, attr)			\
/* Walk to the leftmost (val < 0) or rightmost (val > 0) node. */	\
attr struct type *							\
name##_RB_MINMAX(struct name *head, int val)				\
{									\
	struct type *last = NULL;					\
	struct type *node;						\
									\
	for (node = RB_ROOT(head); node != NULL;			\
	    node = (val < 0) ? RB_LEFT(node, field) :			\
	    RB_RIGHT(node, field))					\
		last = node;						\
	return (last);	/* NULL iff the tree is empty */		\
}
1000
#define RB_GENERATE_REINSERT(name, type, field, cmp, attr)		\
/* Re-establish elm's sort position after its key has changed. */	\
/* Returns NULL if elm was already (or is now) correctly placed, */	\
/* or the existing node whose key collides with elm's new key. */	\
attr struct type *							\
name##_RB_REINSERT(struct name *head, struct type *elm)			\
{									\
	struct type *cmpelm;						\
	if (((cmpelm = RB_PREV(name, head, elm)) != NULL &&		\
	    cmp(cmpelm, elm) >= 0) ||					\
	    ((cmpelm = RB_NEXT(name, head, elm)) != NULL &&		\
	    cmp(elm, cmpelm) >= 0)) {					\
		/* XXXLAS: Remove/insert is heavy handed. */		\
		RB_REMOVE(name, head, elm);				\
		return (RB_INSERT(name, head, elm));			\
	}								\
	return (NULL);							\
}
1016
/* Direction arguments for name##_RB_MINMAX(): leftmost vs. rightmost. */
#define RB_NEGINF -1
#define RB_INF 1

/*
 * Convenience wrappers: callers pass the tree's `name' here instead of
 * spelling out the RB_GENERATE*-emitted function names directly.
 */
#define RB_INSERT(name, x, y) name##_RB_INSERT(x, y)
#define RB_INSERT_NEXT(name, x, y, z) name##_RB_INSERT_NEXT(x, y, z)
#define RB_INSERT_PREV(name, x, y, z) name##_RB_INSERT_PREV(x, y, z)
#define RB_REMOVE(name, x, y) name##_RB_REMOVE(x, y)
#define RB_FIND(name, x, y) name##_RB_FIND(x, y)
#define RB_NFIND(name, x, y) name##_RB_NFIND(x, y)
#define RB_NEXT(name, x, y) name##_RB_NEXT(y)
#define RB_PREV(name, x, y) name##_RB_PREV(y)
#define RB_MIN(name, x) name##_RB_MINMAX(x, RB_NEGINF)
#define RB_MAX(name, x) name##_RB_MINMAX(x, RB_INF)
#define RB_REINSERT(name, x, y) name##_RB_REINSERT(x, y)
1031
/* Iterate over the tree in ascending key order. */
#define RB_FOREACH(x, name, head) \
	for ((x) = RB_MIN(name, head); \
	     (x) != NULL; \
	     (x) = name##_RB_NEXT(x))

/* As RB_FOREACH, but start from node y instead of the minimum. */
#define RB_FOREACH_FROM(x, name, y) \
	for ((x) = (y); \
	    ((x) != NULL) && ((y) = name##_RB_NEXT(x), (x) != NULL); \
	     (x) = (y))

/* Ascending iteration that tolerates removal of x; y is scratch. */
#define RB_FOREACH_SAFE(x, name, head, y) \
	for ((x) = RB_MIN(name, head); \
	    ((x) != NULL) && ((y) = name##_RB_NEXT(x), (x) != NULL); \
	     (x) = (y))

/* Iterate over the tree in descending key order. */
#define RB_FOREACH_REVERSE(x, name, head) \
	for ((x) = RB_MAX(name, head); \
	     (x) != NULL; \
	     (x) = name##_RB_PREV(x))

/* As RB_FOREACH_REVERSE, but start from node y instead of the maximum. */
#define RB_FOREACH_REVERSE_FROM(x, name, y) \
	for ((x) = (y); \
	    ((x) != NULL) && ((y) = name##_RB_PREV(x), (x) != NULL); \
	     (x) = (y))

/* Descending iteration that tolerates removal of x; y is scratch. */
#define RB_FOREACH_REVERSE_SAFE(x, name, head, y) \
	for ((x) = RB_MAX(name, head); \
	    ((x) != NULL) && ((y) = name##_RB_PREV(x), (x) != NULL); \
	     (x) = (y))
1061
1062#endif /* _SYS_TREE_H_ */