xref: /freebsd/sys/sys/tree.h (revision c44d2663a790b31bc1cd08958b45a661487c0287)
1 /*	$NetBSD: tree.h,v 1.8 2004/03/28 19:38:30 provos Exp $	*/
2 /*	$OpenBSD: tree.h,v 1.7 2002/10/17 21:51:54 art Exp $	*/
3 
4 /*-
5  * SPDX-License-Identifier: BSD-2-Clause
6  *
7  * Copyright 2002 Niels Provos <provos@citi.umich.edu>
8  * All rights reserved.
9  *
10  * Redistribution and use in source and binary forms, with or without
11  * modification, are permitted provided that the following conditions
12  * are met:
13  * 1. Redistributions of source code must retain the above copyright
14  *    notice, this list of conditions and the following disclaimer.
15  * 2. Redistributions in binary form must reproduce the above copyright
16  *    notice, this list of conditions and the following disclaimer in the
17  *    documentation and/or other materials provided with the distribution.
18  *
19  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
20  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
21  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
22  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
23  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
24  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29  */
30 
31 #ifndef	_SYS_TREE_H_
32 #define	_SYS_TREE_H_
33 
34 #include <sys/cdefs.h>
35 
36 /*
37  * This file defines data structures for different types of trees:
38  * splay trees and rank-balanced trees.
39  *
40  * A splay tree is a self-organizing data structure.  Every operation
41  * on the tree causes a splay to happen.  The splay moves the requested
42  * node to the root of the tree and partly rebalances it.
43  *
44  * This has the benefit that request locality causes faster lookups as
45  * the requested nodes move to the top of the tree.  On the other hand,
46  * every lookup causes memory writes.
47  *
48  * The Balance Theorem bounds the total access time for m operations
49  * and n inserts on an initially empty tree as O((m + n)lg n).  The
 * amortized cost for a sequence of m accesses to a splay tree is O(lg n).
51  *
52  * A rank-balanced tree is a binary search tree with an integer
53  * rank-difference as an attribute of each pointer from parent to child.
54  * The sum of the rank-differences on any path from a node down to null is
55  * the same, and defines the rank of that node. The rank of the null node
56  * is -1.
57  *
58  * Different additional conditions define different sorts of balanced trees,
59  * including "red-black" and "AVL" trees.  The set of conditions applied here
 * are the "weak-AVL" conditions of Haeupler, Sen and Tarjan presented in
61  * "Rank Balanced Trees", ACM Transactions on Algorithms Volume 11 Issue 4 June
62  * 2015 Article No.: 30pp 1–26 https://doi.org/10.1145/2689412 (the HST paper):
63  *	- every rank-difference is 1 or 2.
64  *	- the rank of any leaf is 1.
65  *
66  * For historical reasons, rank differences that are even are associated
67  * with the color red (Rank-Even-Difference), and the child that a red edge
68  * points to is called a red child.
69  *
70  * Every operation on a rank-balanced tree is bounded as O(lg n).
71  * The maximum height of a rank-balanced tree is 2lg (n+1).
72  */
73 
/* Declares the head structure of a splay tree of 'type' nodes. */
#define SPLAY_HEAD(name, type)						\
struct name {								\
	struct type *sph_root; /* root of the tree */			\
}

/* Static initializer for an empty tree head. */
#define SPLAY_INITIALIZER(root)						\
	{ NULL }

/* Run-time initialization of a tree head to empty. */
#define SPLAY_INIT(root) do {						\
	(root)->sph_root = NULL;					\
} while (/*CONSTCOND*/ 0)

/* Per-node linkage; embed one of these in each element of 'type'. */
#define SPLAY_ENTRY(type)						\
struct {								\
	struct type *spe_left; /* left element */			\
	struct type *spe_right; /* right element */			\
}

/* Accessors for node children, the root, and the empty-tree test. */
#define SPLAY_LEFT(elm, field)		(elm)->field.spe_left
#define SPLAY_RIGHT(elm, field)		(elm)->field.spe_right
#define SPLAY_ROOT(head)		(head)->sph_root
#define SPLAY_EMPTY(head)		(SPLAY_ROOT(head) == NULL)
96 
/* SPLAY_ROTATE_{LEFT,RIGHT} expect that tmp hold SPLAY_{RIGHT,LEFT} */

/* Rotate right: tmp (the root's left child) becomes the new root. */
#define SPLAY_ROTATE_RIGHT(head, tmp, field) do {			\
	SPLAY_LEFT((head)->sph_root, field) = SPLAY_RIGHT(tmp, field);	\
	SPLAY_RIGHT(tmp, field) = (head)->sph_root;			\
	(head)->sph_root = tmp;						\
} while (/*CONSTCOND*/ 0)

/* Rotate left: tmp (the root's right child) becomes the new root. */
#define SPLAY_ROTATE_LEFT(head, tmp, field) do {			\
	SPLAY_RIGHT((head)->sph_root, field) = SPLAY_LEFT(tmp, field);	\
	SPLAY_LEFT(tmp, field) = (head)->sph_root;			\
	(head)->sph_root = tmp;						\
} while (/*CONSTCOND*/ 0)

/*
 * Hang the current root as the leftmost node below 'tmp' (the tail of
 * the right-hand chain built by the top-down splay), then descend to
 * the root's left child.
 */
#define SPLAY_LINKLEFT(head, tmp, field) do {				\
	SPLAY_LEFT(tmp, field) = (head)->sph_root;			\
	tmp = (head)->sph_root;						\
	(head)->sph_root = SPLAY_LEFT((head)->sph_root, field);		\
} while (/*CONSTCOND*/ 0)

/*
 * Hang the current root as the rightmost node below 'tmp' (the tail of
 * the left-hand chain built by the top-down splay), then descend to
 * the root's right child.
 */
#define SPLAY_LINKRIGHT(head, tmp, field) do {				\
	SPLAY_RIGHT(tmp, field) = (head)->sph_root;			\
	tmp = (head)->sph_root;						\
	(head)->sph_root = SPLAY_RIGHT((head)->sph_root, field);	\
} while (/*CONSTCOND*/ 0)

/*
 * Reassemble the tree at the end of a top-down splay: reattach the
 * accumulated left and right chains ('node' is the on-stack dummy
 * header holding them) around the new root.
 */
#define SPLAY_ASSEMBLE(head, node, left, right, field) do {		\
	SPLAY_RIGHT(left, field) = SPLAY_LEFT((head)->sph_root, field);	\
	SPLAY_LEFT(right, field) = SPLAY_RIGHT((head)->sph_root, field);\
	SPLAY_LEFT((head)->sph_root, field) = SPLAY_RIGHT(node, field);	\
	SPLAY_RIGHT((head)->sph_root, field) = SPLAY_LEFT(node, field);	\
} while (/*CONSTCOND*/ 0)
128 
/* Generates prototypes and inline functions */

#define SPLAY_PROTOTYPE(name, type, field, cmp)				\
void name##_SPLAY(struct name *, struct type *);			\
void name##_SPLAY_MINMAX(struct name *, int);				\
struct type *name##_SPLAY_INSERT(struct name *, struct type *);		\
struct type *name##_SPLAY_REMOVE(struct name *, struct type *);		\
									\
/* Finds the node with the same key as elm */				\
static __unused __inline struct type *					\
name##_SPLAY_FIND(struct name *head, struct type *elm)			\
{									\
	if (SPLAY_EMPTY(head))						\
		return(NULL);						\
	name##_SPLAY(head, elm);					\
	if ((cmp)(elm, (head)->sph_root) == 0)				\
		return (head->sph_root);				\
	return (NULL);							\
}									\
									\
/* Returns the in-order successor of elm, or NULL; splays elm first. */	\
static __unused __inline struct type *					\
name##_SPLAY_NEXT(struct name *head, struct type *elm)			\
{									\
	name##_SPLAY(head, elm);					\
	if (SPLAY_RIGHT(elm, field) != NULL) {				\
		elm = SPLAY_RIGHT(elm, field);				\
		while (SPLAY_LEFT(elm, field) != NULL) {		\
			elm = SPLAY_LEFT(elm, field);			\
		}							\
	} else								\
		elm = NULL;						\
	return (elm);							\
}									\
									\
/* Splays the minimum (val < 0) or maximum (val > 0) node to the root	\
 * and returns it. */							\
static __unused __inline struct type *					\
name##_SPLAY_MIN_MAX(struct name *head, int val)			\
{									\
	name##_SPLAY_MINMAX(head, val);					\
	return (SPLAY_ROOT(head));					\
}
169 
/* Main splay operation.
 * Moves node close to the key of elm to top
 */
#define SPLAY_GENERATE(name, type, field, cmp)				\
/* Inserts elm; returns NULL on success, or the existing equal node. */	\
struct type *								\
name##_SPLAY_INSERT(struct name *head, struct type *elm)		\
{									\
    if (SPLAY_EMPTY(head)) {						\
	    SPLAY_LEFT(elm, field) = SPLAY_RIGHT(elm, field) = NULL;	\
    } else {								\
	    __typeof(cmp(NULL, NULL)) __comp;				\
	    name##_SPLAY(head, elm);					\
	    __comp = (cmp)(elm, (head)->sph_root);			\
	    if (__comp < 0) {						\
		    SPLAY_LEFT(elm, field) = SPLAY_LEFT((head)->sph_root, field);\
		    SPLAY_RIGHT(elm, field) = (head)->sph_root;		\
		    SPLAY_LEFT((head)->sph_root, field) = NULL;		\
	    } else if (__comp > 0) {					\
		    SPLAY_RIGHT(elm, field) = SPLAY_RIGHT((head)->sph_root, field);\
		    SPLAY_LEFT(elm, field) = (head)->sph_root;		\
		    SPLAY_RIGHT((head)->sph_root, field) = NULL;	\
	    } else							\
		    return ((head)->sph_root);				\
    }									\
    (head)->sph_root = (elm);						\
    return (NULL);							\
}									\
									\
/* Removes elm; returns it, or NULL if no equal node is in the tree. */	\
struct type *								\
name##_SPLAY_REMOVE(struct name *head, struct type *elm)		\
{									\
	struct type *__tmp;						\
	if (SPLAY_EMPTY(head))						\
		return (NULL);						\
	name##_SPLAY(head, elm);					\
	if ((cmp)(elm, (head)->sph_root) == 0) {			\
		if (SPLAY_LEFT((head)->sph_root, field) == NULL) {	\
			(head)->sph_root = SPLAY_RIGHT((head)->sph_root, field);\
		} else {						\
			__tmp = SPLAY_RIGHT((head)->sph_root, field);	\
			(head)->sph_root = SPLAY_LEFT((head)->sph_root, field);\
			name##_SPLAY(head, elm);			\
			SPLAY_RIGHT((head)->sph_root, field) = __tmp;	\
		}							\
		return (elm);						\
	}								\
	return (NULL);							\
}									\
									\
/* Top-down splay: move the node matching elm's key (or the last node	\
 * on the search path, if no match exists) to the root. */		\
void									\
name##_SPLAY(struct name *head, struct type *elm)			\
{									\
	struct type __node, *__left, *__right, *__tmp;			\
	__typeof(cmp(NULL, NULL)) __comp;				\
\
	SPLAY_LEFT(&__node, field) = SPLAY_RIGHT(&__node, field) = NULL;\
	__left = __right = &__node;					\
\
	while ((__comp = (cmp)(elm, (head)->sph_root)) != 0) {		\
		if (__comp < 0) {					\
			__tmp = SPLAY_LEFT((head)->sph_root, field);	\
			if (__tmp == NULL)				\
				break;					\
			if ((cmp)(elm, __tmp) < 0){			\
				SPLAY_ROTATE_RIGHT(head, __tmp, field);	\
				if (SPLAY_LEFT((head)->sph_root, field) == NULL)\
					break;				\
			}						\
			SPLAY_LINKLEFT(head, __right, field);		\
		} else if (__comp > 0) {				\
			__tmp = SPLAY_RIGHT((head)->sph_root, field);	\
			if (__tmp == NULL)				\
				break;					\
			if ((cmp)(elm, __tmp) > 0){			\
				SPLAY_ROTATE_LEFT(head, __tmp, field);	\
				if (SPLAY_RIGHT((head)->sph_root, field) == NULL)\
					break;				\
			}						\
			SPLAY_LINKRIGHT(head, __left, field);		\
		}							\
	}								\
	SPLAY_ASSEMBLE(head, &__node, __left, __right, field);		\
}									\
									\
/* Splay with either the minimum or the maximum element			\
 * Used to find minimum or maximum element in tree.			\
 */									\
void name##_SPLAY_MINMAX(struct name *head, int __comp) \
{									\
	struct type __node, *__left, *__right, *__tmp;			\
\
	SPLAY_LEFT(&__node, field) = SPLAY_RIGHT(&__node, field) = NULL;\
	__left = __right = &__node;					\
\
	while (1) {							\
		if (__comp < 0) {					\
			__tmp = SPLAY_LEFT((head)->sph_root, field);	\
			if (__tmp == NULL)				\
				break;					\
			/* __comp never changes, so this inner test is	\
			 * always true: always rotate then link. */	\
			if (__comp < 0){				\
				SPLAY_ROTATE_RIGHT(head, __tmp, field);	\
				if (SPLAY_LEFT((head)->sph_root, field) == NULL)\
					break;				\
			}						\
			SPLAY_LINKLEFT(head, __right, field);		\
		} else if (__comp > 0) {				\
			__tmp = SPLAY_RIGHT((head)->sph_root, field);	\
			if (__tmp == NULL)				\
				break;					\
			/* Likewise always true when __comp > 0. */	\
			if (__comp > 0) {				\
				SPLAY_ROTATE_LEFT(head, __tmp, field);	\
				if (SPLAY_RIGHT((head)->sph_root, field) == NULL)\
					break;				\
			}						\
			SPLAY_LINKRIGHT(head, __left, field);		\
		}							\
	}								\
	SPLAY_ASSEMBLE(head, &__node, __left, __right, field);		\
}
289 
/* Sentinel arguments for name##_SPLAY_MINMAX: select min or max. */
#define SPLAY_NEGINF	-1
#define SPLAY_INF	1

/* Convenience wrappers around the generated per-'name' functions. */
#define SPLAY_INSERT(name, x, y)	name##_SPLAY_INSERT(x, y)
#define SPLAY_REMOVE(name, x, y)	name##_SPLAY_REMOVE(x, y)
#define SPLAY_FIND(name, x, y)		name##_SPLAY_FIND(x, y)
#define SPLAY_NEXT(name, x, y)		name##_SPLAY_NEXT(x, y)
#define SPLAY_MIN(name, x)		(SPLAY_EMPTY(x) ? NULL	\
					: name##_SPLAY_MIN_MAX(x, SPLAY_NEGINF))
#define SPLAY_MAX(name, x)		(SPLAY_EMPTY(x) ? NULL	\
					: name##_SPLAY_MIN_MAX(x, SPLAY_INF))

/* In-order traversal; note each visit splays the element to the root. */
#define SPLAY_FOREACH(x, name, head)					\
	for ((x) = SPLAY_MIN(name, head);				\
	     (x) != NULL;						\
	     (x) = SPLAY_NEXT(name, head, x))
306 
/* Macros that define a rank-balanced tree */

/* Declares the head structure of a rank-balanced tree of 'type' nodes. */
#define RB_HEAD(name, type)						\
struct name {								\
	struct type *rbh_root; /* root of the tree */			\
}

/* Static initializer for an empty tree head. */
#define RB_INITIALIZER(root)						\
	{ NULL }

/* Run-time initialization of a tree head to empty. */
#define RB_INIT(root) do {						\
	(root)->rbh_root = NULL;					\
} while (/*CONSTCOND*/ 0)

/*
 * Per-node linkage; embed one of these in each element of 'type'.
 * rbe_link[0] is the parent pointer (with color bits), rbe_link[1]
 * the left child and rbe_link[2] the right child — see _RB_UP,
 * _RB_L and _RB_R below.
 */
#define RB_ENTRY(type)							\
struct {								\
	struct type *rbe_link[3];					\
}
324 
/*
 * With the expectation that any object of struct type has an
 * address that is a multiple of 4, and that therefore the
 * 2 least significant bits of a pointer to struct type are
 * always zero, this implementation sets those bits to indicate
 * that the left or right child of the tree node is "red".
 *
 * Macro parameters are fully parenthesized in the expansions below so
 * that arbitrary argument expressions expand correctly.
 */
#define _RB_LINK(elm, dir, field)	(elm)->field.rbe_link[(dir)]
#define _RB_UP(elm, field)		_RB_LINK(elm, 0, field)
#define _RB_L				((__uintptr_t)1)
#define _RB_R				((__uintptr_t)2)
#define _RB_LR				((__uintptr_t)3)
#define _RB_BITS(elm)			((__uintptr_t)(elm))
#define _RB_BITSUP(elm, field)		_RB_BITS(_RB_UP(elm, field))
#define _RB_PTR_OP(elm, op, dir)	((__typeof(elm))		\
					((__uintptr_t)(elm) op (dir)))
#define _RB_PTR(elm)			_RB_PTR_OP((elm), &, ~_RB_LR)
#define _RB_MOD_OR(elm, dir)		((elm) = _RB_PTR_OP((elm), |, (dir)))
#define _RB_MOD_XOR(elm, dir)		((elm) = _RB_PTR_OP((elm), ^, (dir)))

/* RB_PARENT yields the parent pointer with the color bits masked off. */
#define RB_PARENT(elm, field)		_RB_PTR(_RB_UP(elm, field))
#define RB_LEFT(elm, field)		_RB_LINK(elm, _RB_L, field)
#define RB_RIGHT(elm, field)		_RB_LINK(elm, _RB_R, field)
#define RB_ROOT(head)			(head)->rbh_root
#define RB_EMPTY(head)			(RB_ROOT(head) == NULL)
350 
/*
 * Point dst's parent link at src while preserving dst's existing
 * color bits.  'src' and 'parent' are parenthesized so that any
 * pointer expression is a safe argument.
 */
#define RB_SET_PARENT(dst, src, field) do {				\
	_RB_UP(dst, field) = (__typeof(src))((__uintptr_t)(src) |	\
	    (_RB_BITSUP(dst, field) & _RB_LR));				\
} while (/*CONSTCOND*/ 0)

/* Initialize elm as a leaf with the given (possibly NULL) parent. */
#define RB_SET(elm, parent, field) do {					\
	_RB_UP(elm, field) = (parent);					\
	RB_LEFT(elm, field) = RB_RIGHT(elm, field) = NULL;		\
} while (/*CONSTCOND*/ 0)
360 
/*
 * Either RB_AUGMENT or RB_AUGMENT_CHECK is invoked in a loop at the root of
 * every modified subtree, from the bottom up to the root, to update augmented
 * node data.  RB_AUGMENT_CHECK returns true only when the update changes the
 * node data, so that updating can be stopped short of the root when it returns
 * false.
 */
#ifndef RB_AUGMENT_CHECK
#ifndef RB_AUGMENT
/* No augmentation configured: nothing ever changes. */
#define RB_AUGMENT_CHECK(x) 0
#else
/* RB_AUGMENT has no result; treat every update as a change. */
#define RB_AUGMENT_CHECK(x) (RB_AUGMENT(x), 1)
#endif
#endif

/*
 * Re-augment from elm up toward the root, stopping as soon as an
 * update leaves a node's data unchanged.
 */
#define RB_UPDATE_AUGMENT(elm, field) do {				\
	__typeof(elm) rb_update_tmp = (elm);				\
	while (RB_AUGMENT_CHECK(rb_update_tmp) &&			\
	    (rb_update_tmp = RB_PARENT(rb_update_tmp, field)) != NULL)	\
		;							\
} while (0)
382 
/*
 * Replace 'out' with 'in' as the child of 'par', or as the tree root
 * when 'par' is NULL.  'par' is parenthesized (like the other
 * parameters) so that any argument expression expands safely.
 */
#define RB_SWAP_CHILD(head, par, out, in, field) do {			\
	if ((par) == NULL)						\
		RB_ROOT(head) = (in);					\
	else if ((out) == RB_LEFT(par, field))				\
		RB_LEFT(par, field) = (in);				\
	else								\
		RB_RIGHT(par, field) = (in);				\
} while (/*CONSTCOND*/ 0)
391 
/*
 * RB_ROTATE macro partially restructures the tree to improve balance. In the
 * case when dir is _RB_L, tmp is a right child of elm.  After rotation, elm
 * is a left child of tmp, and the subtree that represented the items between
 * them, which formerly hung to the left of tmp now hangs to the right of elm.
 * The parent-child relationship between elm and its former parent is not
 * changed; where this macro once updated those fields, that is now left to the
 * caller of RB_ROTATE to clean up, so that a pair of rotations does not twice
 * update the same pair of pointer fields with distinct values.
 */
#define RB_ROTATE(elm, tmp, dir, field) do {				\
	/* dir ^ _RB_LR is the direction opposite 'dir'. */		\
	if ((_RB_LINK(elm, dir ^ _RB_LR, field) =			\
	    _RB_LINK(tmp, dir, field)) != NULL)				\
		RB_SET_PARENT(_RB_LINK(tmp, dir, field), elm, field);	\
	_RB_LINK(tmp, dir, field) = (elm);				\
	RB_SET_PARENT(elm, tmp, field);					\
} while (/*CONSTCOND*/ 0)
409 
/* Generates prototypes and inline functions */
#define	RB_PROTOTYPE(name, type, field, cmp)				\
	RB_PROTOTYPE_INTERNAL(name, type, field, cmp,)
/* As RB_PROTOTYPE, but the generated functions are static. */
#define	RB_PROTOTYPE_STATIC(name, type, field, cmp)			\
	RB_PROTOTYPE_INTERNAL(name, type, field, cmp, __unused static)
#define RB_PROTOTYPE_INTERNAL(name, type, field, cmp, attr)		\
	RB_PROTOTYPE_RANK(name, type, attr)				\
	RB_PROTOTYPE_INSERT_COLOR(name, type, attr);			\
	RB_PROTOTYPE_REMOVE_COLOR(name, type, attr);			\
	RB_PROTOTYPE_INSERT_FINISH(name, type, attr);			\
	RB_PROTOTYPE_INSERT(name, type, attr);				\
	RB_PROTOTYPE_REMOVE(name, type, attr);				\
	RB_PROTOTYPE_FIND(name, type, attr);				\
	RB_PROTOTYPE_NFIND(name, type, attr);				\
	RB_PROTOTYPE_NEXT(name, type, attr);				\
	RB_PROTOTYPE_INSERT_NEXT(name, type, attr);			\
	RB_PROTOTYPE_PREV(name, type, attr);				\
	RB_PROTOTYPE_INSERT_PREV(name, type, attr);			\
	RB_PROTOTYPE_MINMAX(name, type, attr);				\
	RB_PROTOTYPE_REINSERT(name, type, attr);
/* The rank-verification function exists only under _RB_DIAGNOSTIC. */
#ifdef _RB_DIAGNOSTIC
#define RB_PROTOTYPE_RANK(name, type, attr)				\
	attr int name##_RB_RANK(struct type *);
#else
#define RB_PROTOTYPE_RANK(name, type, attr)
#endif
#define RB_PROTOTYPE_INSERT_COLOR(name, type, attr)			\
	attr struct type *name##_RB_INSERT_COLOR(struct name *,		\
	    struct type *, struct type *)
#define RB_PROTOTYPE_REMOVE_COLOR(name, type, attr)			\
	attr struct type *name##_RB_REMOVE_COLOR(struct name *,		\
	    struct type *, struct type *)
#define RB_PROTOTYPE_REMOVE(name, type, attr)				\
	attr struct type *name##_RB_REMOVE(struct name *, struct type *)
#define RB_PROTOTYPE_INSERT_FINISH(name, type, attr)			\
	attr struct type *name##_RB_INSERT_FINISH(struct name *,	\
	    struct type *, struct type **, struct type *)
#define RB_PROTOTYPE_INSERT(name, type, attr)				\
	attr struct type *name##_RB_INSERT(struct name *, struct type *)
#define RB_PROTOTYPE_FIND(name, type, attr)				\
	attr struct type *name##_RB_FIND(struct name *, struct type *)
#define RB_PROTOTYPE_NFIND(name, type, attr)				\
	attr struct type *name##_RB_NFIND(struct name *, struct type *)
#define RB_PROTOTYPE_NEXT(name, type, attr)				\
	attr struct type *name##_RB_NEXT(struct type *)
#define RB_PROTOTYPE_INSERT_NEXT(name, type, attr)			\
	attr struct type *name##_RB_INSERT_NEXT(struct name *,		\
	    struct type *, struct type *)
#define RB_PROTOTYPE_PREV(name, type, attr)				\
	attr struct type *name##_RB_PREV(struct type *)
#define RB_PROTOTYPE_INSERT_PREV(name, type, attr)			\
	attr struct type *name##_RB_INSERT_PREV(struct name *,		\
	    struct type *, struct type *)
#define RB_PROTOTYPE_MINMAX(name, type, attr)				\
	attr struct type *name##_RB_MINMAX(struct name *, int)
#define RB_PROTOTYPE_REINSERT(name, type, attr)			\
	attr struct type *name##_RB_REINSERT(struct name *, struct type *)
467 
/*
 * Generate all of the rank-balanced tree functions declared by
 * RB_PROTOTYPE, specialized for the given node type, linkage field
 * and comparison function.
 */
#define	RB_GENERATE(name, type, field, cmp)				\
	RB_GENERATE_INTERNAL(name, type, field, cmp,)
/* As RB_GENERATE, but the generated functions are static. */
#define	RB_GENERATE_STATIC(name, type, field, cmp)			\
	RB_GENERATE_INTERNAL(name, type, field, cmp, __unused static)
#define RB_GENERATE_INTERNAL(name, type, field, cmp, attr)		\
	RB_GENERATE_RANK(name, type, field, attr)			\
	RB_GENERATE_INSERT_COLOR(name, type, field, attr)		\
	RB_GENERATE_REMOVE_COLOR(name, type, field, attr)		\
	RB_GENERATE_INSERT_FINISH(name, type, field, attr)		\
	RB_GENERATE_INSERT(name, type, field, cmp, attr)		\
	RB_GENERATE_REMOVE(name, type, field, attr)			\
	RB_GENERATE_FIND(name, type, field, cmp, attr)			\
	RB_GENERATE_NFIND(name, type, field, cmp, attr)			\
	RB_GENERATE_NEXT(name, type, field, attr)			\
	RB_GENERATE_INSERT_NEXT(name, type, field, cmp, attr)		\
	RB_GENERATE_PREV(name, type, field, attr)			\
	RB_GENERATE_INSERT_PREV(name, type, field, cmp, attr)		\
	RB_GENERATE_MINMAX(name, type, field, attr)			\
	RB_GENERATE_REINSERT(name, type, field, cmp, attr)
490 
#ifdef _RB_DIAGNOSTIC
/*
 * When RB_AUGMENT is defined, RB_AUGMENT_CHECK always reports a change
 * and so cannot serve as a consistency test; only a user-supplied
 * RB_AUGMENT_CHECK can be used to verify augmentation data.
 */
#ifndef RB_AUGMENT
#define _RB_AUGMENT_VERIFY(x) RB_AUGMENT_CHECK(x)
#else
#define _RB_AUGMENT_VERIFY(x) 0
#endif
#define RB_GENERATE_RANK(name, type, field, attr)			\
/*									\
 * Return the rank of the subtree rooted at elm, or -1 if the subtree	\
 * is not rank-balanced, or has inconsistent augmentation data.		\
 */									\
attr int								\
name##_RB_RANK(struct type *elm)					\
{									\
	struct type *left, *right, *up;					\
	int left_rank, right_rank;					\
									\
	if (elm == NULL)						\
		return (0);						\
	up = _RB_UP(elm, field);					\
	left = RB_LEFT(elm, field);					\
	/* A red (even-difference) edge contributes 2, else 1. */	\
	left_rank = ((_RB_BITS(up) & _RB_L) ? 2 : 1) +			\
	    name##_RB_RANK(left);					\
	right = RB_RIGHT(elm, field);					\
	right_rank = ((_RB_BITS(up) & _RB_R) ? 2 : 1) +			\
	    name##_RB_RANK(right);					\
	if (left_rank != right_rank ||					\
	    (left_rank == 2 && left == NULL && right == NULL) ||	\
	    _RB_AUGMENT_VERIFY(elm))					\
		return (-1);						\
	return (left_rank);						\
}
#else
#define RB_GENERATE_RANK(name, type, field, attr)
#endif
526 
#define RB_GENERATE_INSERT_COLOR(name, type, field, attr)		\
/* Restore rank balance after linking the new leaf 'elm' below		\
 * 'parent'.  Returns the root of a subtree restructured by rotation	\
 * (for re-augmentation by the caller), or NULL if only rank changes	\
 * were needed. */							\
attr struct type *							\
name##_RB_INSERT_COLOR(struct name *head,				\
    struct type *parent, struct type *elm)				\
{									\
	/*								\
	 * Initially, elm is a leaf.  Either its parent was previously	\
	 * a leaf, with two black null children, or an interior node	\
	 * with a black non-null child and a red null child. The        \
	 * balance criterion "the rank of any leaf is 1" precludes the  \
	 * possibility of two red null children for the initial parent. \
	 * So the first loop iteration cannot lead to accessing an      \
	 * uninitialized 'child', and a later iteration can only happen \
	 * when a value has been assigned to 'child' in the previous    \
	 * one.								\
	 */								\
	struct type *child, *child_up, *gpar;				\
	__uintptr_t elmdir, sibdir;					\
									\
	do {								\
		/* the rank of the tree rooted at elm grew */		\
		gpar = _RB_UP(parent, field);				\
		elmdir = RB_RIGHT(parent, field) == elm ? _RB_R : _RB_L; \
		if (_RB_BITS(gpar) & elmdir) {				\
			/* shorten the parent-elm edge to rebalance */	\
			_RB_MOD_XOR(_RB_UP(parent, field), elmdir);	\
			return (NULL);					\
		}							\
		sibdir = elmdir ^ _RB_LR;				\
		/* the other edge must change length */			\
		_RB_MOD_XOR(_RB_UP(parent, field), sibdir);		\
		if ((_RB_BITS(gpar) & _RB_LR) == 0) {			\
			/* both edges now short, retry from parent */	\
			child = elm;					\
			elm = parent;					\
			continue;					\
		}							\
		_RB_UP(parent, field) = gpar = _RB_PTR(gpar);		\
		if (_RB_BITSUP(elm, field) & elmdir) {			\
			/*						\
			 * Exactly one of the edges descending from elm \
			 * is long. The long one is in the same		\
			 * direction as the edge from parent to elm,	\
			 * so change that by rotation.  The edge from	\
			 * parent to z was shortened above.  Shorten	\
			 * the long edge down from elm, and adjust	\
			 * other edge lengths based on the downward	\
			 * edges from 'child'.				\
			 *						\
			 *	     par		 par		\
			 *	    /	\		/   \		\
			 *	  elm	 z	       /     z		\
			 *	 /  \		     child		\
			 *	/  child	     /	 \		\
			 *     /   /  \		   elm	  \		\
			 *    w	  /    \	  /   \    y		\
			 *	 x      y	 w     \		\
			 *				x		\
			 */						\
			RB_ROTATE(elm, child, elmdir, field);		\
			child_up = _RB_UP(child, field);		\
			if (_RB_BITS(child_up) & sibdir)		\
				_RB_MOD_XOR(_RB_UP(parent, field),	\
				    elmdir); 				\
			if (_RB_BITS(child_up) & elmdir)		\
				_RB_MOD_XOR(_RB_UP(elm, field),		\
				    _RB_LR);				\
			else						\
				_RB_MOD_XOR(_RB_UP(elm, field),		\
				    elmdir);				\
			/* if child is a leaf, don't augment elm,	\
			 * since it is restored to be a leaf again. */	\
			if ((_RB_BITS(child_up) & _RB_LR) == 0)		\
				elm = child;				\
		} else							\
			child = elm;					\
									\
		/*							\
		 * The long edge descending from 'child' points back	\
		 * in the direction of 'parent'. Rotate to make		\
		 * 'parent' a child of 'child', then make both edges	\
		 * of 'child' short to rebalance.			\
		 *							\
		 *	     par		 child			\
		 *	    /	\		/     \			\
		 *	   /	 z	       x       par		\
		 *	child			      /	  \		\
		 *	 /  \			     /	   z		\
		 *	x    \			    y			\
		 *	      y						\
		 */							\
		RB_ROTATE(parent, child, sibdir, field);		\
		_RB_UP(child, field) = gpar;				\
		RB_SWAP_CHILD(head, gpar, parent, child, field);	\
		/*							\
		 * Elements rotated down have new, smaller subtrees,	\
		 * so update augmentation for them.			\
		 */							\
		if (elm != child)					\
			(void)RB_AUGMENT_CHECK(elm);			\
		(void)RB_AUGMENT_CHECK(parent);				\
		return (child);						\
	} while ((parent = gpar) != NULL);				\
	return (NULL);							\
}
632 
#ifndef RB_STRICT_HST
/*
 * In REMOVE_COLOR, the HST paper, in figure 3, in the single-rotate case, has
 * 'parent' with one higher rank, and then reduces its rank if 'parent' has
 * become a leaf.  This implementation always has the parent in its new position
 * with lower rank, to avoid the leaf check.  Define RB_STRICT_HST to 1 to get
 * the behavior that HST describes.
 */
#define RB_STRICT_HST 0
#endif

#define RB_GENERATE_REMOVE_COLOR(name, type, field, attr)		\
/* Restore rank balance after unlinking a node has left 'elm' (a	\
 * possibly null child of 'parent') underweight.  Returns the element	\
 * rotated down into the search path (for re-augmentation by the	\
 * caller), or NULL if only rank changes were needed. */		\
attr struct type *							\
name##_RB_REMOVE_COLOR(struct name *head,				\
    struct type *parent, struct type *elm)				\
{									\
	struct type *gpar, *sib, *up;					\
	__uintptr_t elmdir, sibdir;					\
									\
	if (RB_RIGHT(parent, field) == elm &&				\
	    RB_LEFT(parent, field) == elm) {				\
		/* Deleting a leaf that is an only-child creates a	\
		 * rank-2 leaf. Demote that leaf. */			\
		_RB_UP(parent, field) = _RB_PTR(_RB_UP(parent, field));	\
		elm = parent;						\
		if ((parent = _RB_UP(elm, field)) == NULL)		\
			return (NULL);					\
	}								\
	do {								\
		/* the rank of the tree rooted at elm shrank */		\
		gpar = _RB_UP(parent, field);				\
		elmdir = RB_RIGHT(parent, field) == elm ? _RB_R : _RB_L; \
		_RB_MOD_XOR(gpar, elmdir);				\
		if (_RB_BITS(gpar) & elmdir) {				\
			/* lengthen the parent-elm edge to rebalance */	\
			_RB_UP(parent, field) = gpar;			\
			return (NULL);					\
		}							\
		if (_RB_BITS(gpar) & _RB_LR) {				\
			/* shorten other edge, retry from parent */	\
			_RB_MOD_XOR(gpar, _RB_LR);			\
			_RB_UP(parent, field) = gpar;			\
			gpar = _RB_PTR(gpar);				\
			continue;					\
		}							\
		sibdir = elmdir ^ _RB_LR;				\
		sib = _RB_LINK(parent, sibdir, field);			\
		up = _RB_UP(sib, field);				\
		_RB_MOD_XOR(up, _RB_LR);				\
		if ((_RB_BITS(up) & _RB_LR) == 0) {			\
			/* shorten edges descending from sib, retry */	\
			_RB_UP(sib, field) = up;			\
			continue;					\
		}							\
		if ((_RB_BITS(up) & sibdir) == 0) {			\
			/*						\
			 * The edge descending from 'sib' away from	\
			 * 'parent' is long.  The short edge descending	\
			 * from 'sib' toward 'parent' points to 'elm*'	\
			 * Rotate to make 'sib' a child of 'elm*'	\
			 * then adjust the lengths of the edges		\
			 * descending from 'sib' and 'elm*'.		\
			 *						\
			 *	     par		 par		\
			 *	    /	\		/   \		\
			 *	   /	sib	      elm    \		\
			 *	  /	/ \	            elm*	\
			 *	elm   elm* \	            /  \	\
			 *	      /	\   \		   /    \	\
			 *	     /   \   z		  /      \	\
			 *	    x	  y		 x      sib	\
			 *				        /  \	\
			 *				       /    z	\
			 *				      y		\
			 */						\
			elm = _RB_LINK(sib, elmdir, field);		\
			/* elm is a 1-child.  First rotate at elm. */	\
			RB_ROTATE(sib, elm, sibdir, field);		\
			up = _RB_UP(elm, field);			\
			_RB_MOD_XOR(_RB_UP(parent, field),		\
			    (_RB_BITS(up) & elmdir) ? _RB_LR : elmdir);	\
			_RB_MOD_XOR(_RB_UP(sib, field),			\
			    (_RB_BITS(up) & sibdir) ? _RB_LR : sibdir);	\
			_RB_MOD_OR(_RB_UP(elm, field), _RB_LR);		\
		} else {						\
			if ((_RB_BITS(up) & elmdir) == 0 &&		\
			    RB_STRICT_HST && elm != NULL) {		\
				/* if parent does not become a leaf,	\
				   do not demote parent yet. */		\
				_RB_MOD_XOR(_RB_UP(parent, field),	\
				    sibdir);				\
				_RB_MOD_XOR(_RB_UP(sib, field),		\
				    _RB_LR);				\
			} else if ((_RB_BITS(up) & elmdir) == 0) {	\
				/* demote parent. */			\
				_RB_MOD_XOR(_RB_UP(parent, field),	\
				    elmdir);				\
				_RB_MOD_XOR(_RB_UP(sib, field),		\
				    sibdir);				\
			} else						\
				_RB_MOD_XOR(_RB_UP(sib, field),		\
				    sibdir);				\
			elm = sib;					\
		}							\
									\
		/*							\
		 * The edge descending from 'elm' away from 'parent'	\
		 * is short.  Rotate to make 'parent' a child of 'elm', \
		 * then lengthen the short edges descending from	\
		 * 'parent' and 'elm' to rebalance.			\
		 *							\
		 *	     par		 elm			\
		 *	    /	\		/   \			\
		 *	   e	 \	       /     \			\
		 *		 elm	      /	      \			\
		 *		/  \	    par	       s		\
		 *	       /    \	   /   \			\
		 *	      /	     \	  e	\			\
		 *	     x	      s		 x			\
		 */							\
		RB_ROTATE(parent, elm, elmdir, field);			\
		RB_SET_PARENT(elm, gpar, field);			\
		RB_SWAP_CHILD(head, gpar, parent, elm, field);		\
		/*							\
		 * An element rotated down, but not into the search	\
		 * path has a new, smaller subtree, so update		\
		 * augmentation for it.					\
		 */							\
		if (sib != elm)						\
			(void)RB_AUGMENT_CHECK(sib);			\
		return (parent);					\
	} while (elm = parent, (parent = gpar) != NULL);		\
	return (NULL);							\
}
767 
/*
 * Walk up from 'elm', re-augmenting each node, stopping when an update
 * leaves a node's data unchanged or the root has been processed.  If
 * 'match' is encountered it is cleared, telling the caller that no
 * separate augmentation of 'match' is needed.  Deliberately written as
 * a do/while WITHOUT the usual "while (0)" so that the loop condition
 * drives the walk; the caller's trailing semicolon ends the statement.
 */
#define _RB_AUGMENT_WALK(elm, match, field)				\
do {									\
	if (match == elm)						\
		match = NULL;						\
} while (RB_AUGMENT_CHECK(elm) &&					\
    (elm = RB_PARENT(elm, field)) != NULL)
774 
#define RB_GENERATE_REMOVE(name, type, field, attr)			\
/* Unlink 'out' from the tree, rebalance and re-augment; returns	\
 * 'out'. */								\
attr struct type *							\
name##_RB_REMOVE(struct name *head, struct type *out)			\
{									\
	struct type *child, *in, *opar, *parent;			\
									\
	child = RB_LEFT(out, field);					\
	in = RB_RIGHT(out, field);					\
	opar = _RB_UP(out, field);					\
	if (in == NULL || child == NULL) {				\
		/* 'out' has at most one child; it replaces 'out'. */	\
		in = child = (in == NULL ? child : in);			\
		parent = opar = _RB_PTR(opar);				\
	} else {							\
		/* Two children: replace 'out' with its successor. */	\
		parent = in;						\
		while (RB_LEFT(in, field))				\
			in = RB_LEFT(in, field);			\
		RB_SET_PARENT(child, in, field);			\
		RB_LEFT(in, field) = child;				\
		child = RB_RIGHT(in, field);				\
		if (parent != in) {					\
			RB_SET_PARENT(parent, in, field);		\
			RB_RIGHT(in, field) = parent;			\
			parent = RB_PARENT(in, field);			\
			RB_LEFT(parent, field) = child;			\
		}							\
		_RB_UP(in, field) = opar;				\
		opar = _RB_PTR(opar);					\
	}								\
	RB_SWAP_CHILD(head, opar, out, in, field);			\
	if (child != NULL)						\
		_RB_UP(child, field) = parent;				\
	if (parent != NULL) {						\
		opar = name##_RB_REMOVE_COLOR(head, parent, child);	\
		/* if rotation has made 'parent' the root of the same	\
		 * subtree as before, don't re-augment it. */		\
		if (parent == in && RB_LEFT(parent, field) == NULL) {	\
			opar = NULL;					\
			parent = RB_PARENT(parent, field);		\
		}							\
		_RB_AUGMENT_WALK(parent, opar, field);			\
		if (opar != NULL) {					\
			/*						\
			 * Elements rotated into the search path have	\
			 * changed subtrees, so update augmentation for	\
			 * them if AUGMENT_WALK didn't.			\
			 */						\
			(void)RB_AUGMENT_CHECK(opar);			\
			(void)RB_AUGMENT_CHECK(RB_PARENT(opar, field));	\
		}							\
	}								\
	return (out);							\
}
827 
/*
 * Generate name##_RB_INSERT_FINISH: link 'elm' into the tree at the
 * null link '*pptr' below 'parent', rebalance with
 * name##_RB_INSERT_COLOR unless the tree was empty, and update
 * augmentation from 'elm' up toward the root.  Always returns NULL
 * (no duplicate was found by the caller).
 */
#define RB_GENERATE_INSERT_FINISH(name, type, field, attr)		\
/* Inserts a node into the RB tree */					\
attr struct type *							\
name##_RB_INSERT_FINISH(struct name *head, struct type *parent,		\
    struct type **pptr, struct type *elm)				\
{									\
	struct type *tmp = NULL;					\
									\
	RB_SET(elm, parent, field);					\
	*pptr = elm;							\
	if (parent != NULL)						\
		tmp = name##_RB_INSERT_COLOR(head, parent, elm);	\
	_RB_AUGMENT_WALK(elm, tmp, field);				\
	if (tmp != NULL)						\
		/*							\
		 * An element rotated into the search path has a	\
		 * changed subtree, so update augmentation for it if	\
		 * AUGMENT_WALK didn't.					\
		 */							\
		(void)RB_AUGMENT_CHECK(tmp);				\
	return (NULL);							\
}
850 
#define RB_GENERATE_INSERT(name, type, field, cmp, attr)		\
/* Inserts a node into the RB tree */					\
attr struct type *							\
name##_RB_INSERT(struct name *head, struct type *elm)			\
{									\
	struct type *node = RB_ROOT(head);				\
	struct type **linkp = &RB_ROOT(head);				\
	struct type *parent = NULL;					\
									\
	/* Descend to a null link, remembering the last node seen. */	\
	while (node != NULL) {						\
		__typeof(cmp(NULL, NULL)) comp = (cmp)(elm, node);	\
		parent = node;						\
		if (comp == 0)						\
			return (node);					\
		linkp = (comp < 0) ?					\
		    &RB_LEFT(node, field) : &RB_RIGHT(node, field);	\
		node = *linkp;						\
	}								\
	return (name##_RB_INSERT_FINISH(head, parent, linkp, elm));	\
}
872 
#define RB_GENERATE_FIND(name, type, field, cmp, attr)			\
/* Finds the node with the same key as elm */				\
attr struct type *							\
name##_RB_FIND(struct name *head, struct type *elm)			\
{									\
	struct type *node;						\
									\
	/* Standard binary-search descent from the root. */		\
	for (node = RB_ROOT(head); node != NULL;) {			\
		__typeof(cmp(NULL, NULL)) comp = cmp(elm, node);	\
		if (comp == 0)						\
			return (node);					\
		node = (comp < 0) ?					\
		    RB_LEFT(node, field) : RB_RIGHT(node, field);	\
	}								\
	return (NULL);							\
}
891 
#define RB_GENERATE_NFIND(name, type, field, cmp, attr)			\
/* Finds the first node greater than or equal to the search key */	\
attr struct type *							\
name##_RB_NFIND(struct name *head, struct type *elm)			\
{									\
	struct type *node = RB_ROOT(head);				\
	struct type *lwb = NULL;					\
									\
	/* Track the last node seen that is >= the search key. */	\
	while (node != NULL) {						\
		__typeof(cmp(NULL, NULL)) comp = cmp(elm, node);	\
		if (comp == 0)						\
			return (node);					\
		if (comp < 0) {						\
			lwb = node;					\
			node = RB_LEFT(node, field);			\
		} else							\
			node = RB_RIGHT(node, field);			\
	}								\
	return (lwb);							\
}
913 
#define RB_GENERATE_NEXT(name, type, field, attr)			\
/* ARGSUSED */								\
attr struct type *							\
name##_RB_NEXT(struct type *elm)					\
{									\
	struct type *node = RB_RIGHT(elm, field);			\
									\
	if (node != NULL) {						\
		/* Successor is the leftmost node to the right. */	\
		while (RB_LEFT(node, field) != NULL)			\
			node = RB_LEFT(node, field);			\
		return (node);						\
	}								\
	/* No right subtree: climb until arriving from a left child. */	\
	node = RB_PARENT(elm, field);					\
	while (node != NULL && elm == RB_RIGHT(node, field)) {		\
		elm = node;						\
		node = RB_PARENT(node, field);				\
	}								\
	return (node);							\
}
931 
/*
 * _RB_ORDER_CHECK(cmp, lo, hi): assert that 'lo' sorts strictly before
 * 'hi' under 'cmp'.  Active only in DIAGNOSTIC kernel builds; expands
 * to a no-op otherwise.
 */
#if defined(_KERNEL) && defined(DIAGNOSTIC)
#define _RB_ORDER_CHECK(cmp, lo, hi) do {				\
	KASSERT((cmp)(lo, hi) < 0, ("out of order insertion"));		\
} while (0)
#else
#define _RB_ORDER_CHECK(cmp, lo, hi) do {} while (0)
#endif
939 
#define RB_GENERATE_INSERT_NEXT(name, type, field, cmp, attr)		\
/* Inserts a node into the next position in the RB tree */		\
attr struct type *							\
name##_RB_INSERT_NEXT(struct name *head,				\
    struct type *elm, struct type *next)				\
{									\
	struct type *node;						\
	struct type **linkp = &RB_RIGHT(elm, field);			\
									\
	_RB_ORDER_CHECK(cmp, elm, next);				\
	if (name##_RB_NEXT(elm) != NULL)				\
		_RB_ORDER_CHECK(cmp, next, name##_RB_NEXT(elm));	\
	/* Descend to the leftmost null link right of 'elm'. */		\
	for (node = *linkp; node != NULL; node = *linkp) {		\
		elm = node;						\
		linkp = &RB_LEFT(elm, field);				\
	}								\
	return (name##_RB_INSERT_FINISH(head, elm, linkp, next));	\
}
958 
#define RB_GENERATE_PREV(name, type, field, attr)			\
/* ARGSUSED */								\
attr struct type *							\
name##_RB_PREV(struct type *elm)					\
{									\
	struct type *node = RB_LEFT(elm, field);			\
									\
	if (node != NULL) {						\
		/* Predecessor is the rightmost node to the left. */	\
		while (RB_RIGHT(node, field) != NULL)			\
			node = RB_RIGHT(node, field);			\
		return (node);						\
	}								\
	/* No left subtree: climb until arriving from a right child. */	\
	node = RB_PARENT(elm, field);					\
	while (node != NULL && elm == RB_LEFT(node, field)) {		\
		elm = node;						\
		node = RB_PARENT(node, field);				\
	}								\
	return (node);							\
}
976 
#define RB_GENERATE_INSERT_PREV(name, type, field, cmp, attr)		\
/* Inserts a node into the prev position in the RB tree */		\
attr struct type *							\
name##_RB_INSERT_PREV(struct name *head,				\
    struct type *elm, struct type *prev)				\
{									\
	struct type *node;						\
	struct type **linkp = &RB_LEFT(elm, field);			\
									\
	_RB_ORDER_CHECK(cmp, prev, elm);				\
	if (name##_RB_PREV(elm) != NULL)				\
		_RB_ORDER_CHECK(cmp, name##_RB_PREV(elm), prev);	\
	/* Descend to the rightmost null link left of 'elm'. */		\
	for (node = *linkp; node != NULL; node = *linkp) {		\
		elm = node;						\
		linkp = &RB_RIGHT(elm, field);				\
	}								\
	return (name##_RB_INSERT_FINISH(head, elm, linkp, prev));	\
}
995 
#define RB_GENERATE_MINMAX(name, type, field, attr)			\
attr struct type *							\
name##_RB_MINMAX(struct name *head, int val)				\
{									\
	struct type *node = RB_ROOT(head);				\
	struct type *last = NULL;					\
									\
	/* Follow left links when val < 0 (min), else right (max). */	\
	while (node != NULL) {						\
		last = node;						\
		node = (val < 0) ?					\
		    RB_LEFT(node, field) : RB_RIGHT(node, field);	\
	}								\
	return (last);							\
}
1011 
#define	RB_GENERATE_REINSERT(name, type, field, cmp, attr)		\
attr struct type *							\
name##_RB_REINSERT(struct name *head, struct type *elm)			\
{									\
	struct type *nbr;						\
	int misplaced = 0;						\
									\
	/* 'elm' needs to move only if it sorts out of order with a	\
	 * neighbor in the current tree. */				\
	if ((nbr = RB_PREV(name, head, elm)) != NULL &&			\
	    cmp(nbr, elm) >= 0)						\
		misplaced = 1;						\
	else if ((nbr = RB_NEXT(name, head, elm)) != NULL &&		\
	    cmp(elm, nbr) >= 0)						\
		misplaced = 1;						\
	if (!misplaced)							\
		return (NULL);						\
	/* XXXLAS: Remove/insert is heavy handed. */			\
	RB_REMOVE(name, head, elm);					\
	return (RB_INSERT(name, head, elm));				\
}
1027 
/* Sentinel arguments for name##_RB_MINMAX(): select minimum or maximum. */
#define RB_NEGINF	-1
#define RB_INF	1

/* Convenience wrappers expanding to the RB_GENERATE*-produced functions. */
#define RB_INSERT(name, x, y)	name##_RB_INSERT(x, y)
#define RB_INSERT_NEXT(name, x, y, z)	name##_RB_INSERT_NEXT(x, y, z)
#define RB_INSERT_PREV(name, x, y, z)	name##_RB_INSERT_PREV(x, y, z)
#define RB_REMOVE(name, x, y)	name##_RB_REMOVE(x, y)
#define RB_FIND(name, x, y)	name##_RB_FIND(x, y)
#define RB_NFIND(name, x, y)	name##_RB_NFIND(x, y)
/* 'x' (the head) is unused by NEXT/PREV but kept for API symmetry. */
#define RB_NEXT(name, x, y)	name##_RB_NEXT(y)
#define RB_PREV(name, x, y)	name##_RB_PREV(y)
#define RB_MIN(name, x)		name##_RB_MINMAX(x, RB_NEGINF)
#define RB_MAX(name, x)		name##_RB_MINMAX(x, RB_INF)
#define RB_REINSERT(name, x, y)	name##_RB_REINSERT(x, y)
1042 
/* Iterate over the tree from minimum to maximum. */
#define RB_FOREACH(x, name, head)					\
	for ((x) = RB_MIN(name, head);					\
	     (x) != NULL;						\
	     (x) = name##_RB_NEXT(x))

/*
 * Forward iteration starting at element 'y'.  'y' is advanced to the
 * successor before each body execution, so the body may remove 'x'.
 */
#define RB_FOREACH_FROM(x, name, y)					\
	for ((x) = (y);							\
	    ((x) != NULL) && ((y) = name##_RB_NEXT(x), (x) != NULL);	\
	     (x) = (y))

/* Forward iteration; 'y' holds the successor, so the body may remove 'x'. */
#define RB_FOREACH_SAFE(x, name, head, y)				\
	for ((x) = RB_MIN(name, head);					\
	    ((x) != NULL) && ((y) = name##_RB_NEXT(x), (x) != NULL);	\
	     (x) = (y))

/* Iterate over the tree from maximum to minimum. */
#define RB_FOREACH_REVERSE(x, name, head)				\
	for ((x) = RB_MAX(name, head);					\
	     (x) != NULL;						\
	     (x) = name##_RB_PREV(x))

/*
 * Reverse iteration starting at element 'y'.  'y' is advanced to the
 * predecessor before each body execution, so the body may remove 'x'.
 */
#define RB_FOREACH_REVERSE_FROM(x, name, y)				\
	for ((x) = (y);							\
	    ((x) != NULL) && ((y) = name##_RB_PREV(x), (x) != NULL);	\
	     (x) = (y))

/* Reverse iteration; 'y' holds the predecessor, so the body may remove 'x'. */
#define RB_FOREACH_REVERSE_SAFE(x, name, head, y)			\
	for ((x) = RB_MAX(name, head);					\
	    ((x) != NULL) && ((y) = name##_RB_PREV(x), (x) != NULL);	\
	     (x) = (y))
1072 
1073 #endif	/* _SYS_TREE_H_ */
1074