// SPDX-License-Identifier: GPL-2.0-only
#include <linux/interval_tree.h>
#include <linux/interval_tree_generic.h>
#include <linux/compiler.h>
#include <linux/export.h>

#define START(node) ((node)->start)
#define LAST(node)  ((node)->last)

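/*
 * Instantiate the generic interval tree template for
 * struct interval_tree_node: nodes are keyed on the closed range
 * [start, last], and each rb node caches the maximum ->last of its
 * subtree in __subtree_last. The empty ITSTATIC argument leaves the
 * generated functions non-static so they can be exported below.
 */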
INTERVAL_TREE_DEFINE(struct interval_tree_node, rb,
		     unsigned long, __subtree_last,
		     START, LAST,, interval_tree)

EXPORT_SYMBOL_GPL(interval_tree_insert);
EXPORT_SYMBOL_GPL(interval_tree_remove);
EXPORT_SYMBOL_GPL(interval_tree_subtree_search);
EXPORT_SYMBOL_GPL(interval_tree_iter_first);
EXPORT_SYMBOL_GPL(interval_tree_iter_next);
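
/*
 * Illustrative sketch of the API generated above (hypothetical caller, not
 * taken from in-tree code): callers embed interval_tree_node in their own
 * objects and provide their own locking. Ranges are closed, so both nodes
 * below overlap the query range [12, 16].
 *
 *	struct rb_root_cached root = RB_ROOT_CACHED;
 *	struct interval_tree_node a = { .start = 10, .last = 19 };
 *	struct interval_tree_node b = { .start = 15, .last = 30 };
 *	struct interval_tree_node *node;
 *
 *	interval_tree_insert(&a, &root);
 *	interval_tree_insert(&b, &root);
 *
 *	for (node = interval_tree_iter_first(&root, 12, 16); node;
 *	     node = interval_tree_iter_next(node, 12, 16))
 *		pr_info("overlaps [%lu, %lu]\n", node->start, node->last);
 *
 *	interval_tree_remove(&a, &root);
 *	interval_tree_remove(&b, &root);
 */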

#ifdef CONFIG_INTERVAL_TREE_SPAN_ITER
/*
 * Roll nodes[1] into nodes[0] by advancing nodes[1] to the end of a contiguous
 * span of nodes. This makes nodes[0]->last the end of that contiguous used span
 * of indexes that started at the original nodes[1]->start.
 *
 * If there is an interior hole, nodes[1] is now the first node starting the
 * next used span. A hole span is between nodes[0]->last and nodes[1]->start.
 *
 * If there is a trailing hole, nodes[1] is now NULL. A hole span is between
 * nodes[0]->last and last_index.
 *
 * If the contiguous used range spans to last_index, nodes[1] is set to NULL.
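 *
 * For example, over the range [0, 20] with used spans [0,5], [6,10] and
 * [15,20], starting with nodes[1] at [0,5]: [0,5] and [6,10] abut, so the
 * loop rolls forward and leaves nodes[0] describing last == 10 and nodes[1]
 * at [15,20], exposing the interior hole [11,14].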
 */
static void
interval_tree_span_iter_next_gap(struct interval_tree_span_iter *state)
{
	struct interval_tree_node *cur = state->nodes[1];

	state->nodes[0] = cur;
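	/* Extend across nodes that overlap or immediately abut nodes[0] */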
	do {
		if (cur->last > state->nodes[0]->last)
			state->nodes[0] = cur;
		cur = interval_tree_iter_next(cur, state->first_index,
					      state->last_index);
	} while (cur && (state->nodes[0]->last >= cur->start ||
			 state->nodes[0]->last + 1 == cur->start));
	state->nodes[1] = cur;
}

void interval_tree_span_iter_first(struct interval_tree_span_iter *iter,
				   struct rb_root_cached *itree,
				   unsigned long first_index,
				   unsigned long last_index)
{
	iter->first_index = first_index;
	iter->last_index = last_index;
	iter->nodes[0] = NULL;
	iter->nodes[1] =
		interval_tree_iter_first(itree, first_index, last_index);
	if (!iter->nodes[1]) {
		/* No nodes intersect the span, the whole span is a hole */
		iter->start_hole = first_index;
		iter->last_hole = last_index;
		iter->is_hole = 1;
		return;
	}
	if (iter->nodes[1]->start > first_index) {
		/* Leading hole on first iteration */
		iter->start_hole = first_index;
		iter->last_hole = iter->nodes[1]->start - 1;
		iter->is_hole = 1;
		interval_tree_span_iter_next_gap(iter);
		return;
	}

	/* Starting inside a used range */
	iter->start_used = first_index;
	iter->is_hole = 0;
	interval_tree_span_iter_next_gap(iter);
	iter->last_used = iter->nodes[0]->last;
	if (iter->last_used >= last_index) {
		iter->last_used = last_index;
		iter->nodes[0] = NULL;
		iter->nodes[1] = NULL;
	}
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_first);
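
/*
 * Illustrative sketch (hypothetical caller): the span iterator is normally
 * driven via the interval_tree_for_each_span() helper declared alongside it
 * in include/linux/interval_tree.h, alternating between used and hole spans
 * until is_hole becomes -1.
 *
 *	struct interval_tree_span_iter span;
 *
 *	interval_tree_for_each_span(&span, itree, 0, ULONG_MAX) {
 *		if (span.is_hole)
 *			pr_info("hole [%lu, %lu]\n",
 *				span.start_hole, span.last_hole);
 *		else
 *			pr_info("used [%lu, %lu]\n",
 *				span.start_used, span.last_used);
 *	}
 */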

void interval_tree_span_iter_next(struct interval_tree_span_iter *iter)
{
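	/* Both trackers exhausted: the previous span ended at last_index */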
	if (!iter->nodes[0] && !iter->nodes[1]) {
		iter->is_hole = -1;
		return;
	}

	if (iter->is_hole) {
		iter->start_used = iter->last_hole + 1;
		iter->last_used = iter->nodes[0]->last;
		if (iter->last_used >= iter->last_index) {
			iter->last_used = iter->last_index;
			iter->nodes[0] = NULL;
			iter->nodes[1] = NULL;
		}
		iter->is_hole = 0;
		return;
	}

	if (!iter->nodes[1]) {
		/* Trailing hole */
		iter->start_hole = iter->nodes[0]->last + 1;
		iter->last_hole = iter->last_index;
		iter->nodes[0] = NULL;
		iter->is_hole = 1;
		return;
	}

	/* Must have both nodes[0] and nodes[1]: an interior hole */
	iter->start_hole = iter->nodes[0]->last + 1;
	iter->last_hole = iter->nodes[1]->start - 1;
	iter->is_hole = 1;
	interval_tree_span_iter_next_gap(iter);
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_next);

/*
 * Advance the iterator index to a specific position. The returned used/hole
 * span is updated to start at new_index. This is faster than calling
 * interval_tree_span_iter_first() as it can avoid full searches in several
 * cases where the iterator is already set.
 */
void interval_tree_span_iter_advance(struct interval_tree_span_iter *iter,
				     struct rb_root_cached *itree,
				     unsigned long new_index)
{
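	/* A finished iterator (is_hole == -1) cannot be advanced */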
	if (iter->is_hole == -1)
		return;

	iter->first_index = new_index;
	if (new_index > iter->last_index) {
		iter->is_hole = -1;
		return;
	}

	/* Rely on the union aliasing hole/used */
	if (iter->start_hole <= new_index && new_index <= iter->last_hole) {
		iter->start_hole = new_index;
		return;
	}
	if (new_index == iter->last_hole + 1)
		interval_tree_span_iter_next(iter);
	else
		interval_tree_span_iter_first(iter, itree, new_index,
					      iter->last_index);
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_advance);
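
/*
 * Illustrative sketch (hypothetical caller): a user that has already dealt
 * with everything below 'pos' can skip the iterator forward instead of
 * restarting the whole walk, then test for completion with the
 * interval_tree_span_iter_done() helper from the same header.
 *
 *	interval_tree_span_iter_advance(&span, itree, pos);
 *	if (interval_tree_span_iter_done(&span))
 *		return;
 */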
#endif /* CONFIG_INTERVAL_TREE_SPAN_ITER */