1 // SPDX-License-Identifier: GPL-2.0-only
2 #include <linux/module.h>
3 #include <linux/moduleparam.h>
4 #include <linux/interval_tree.h>
5 #include <linux/prandom.h>
6 #include <linux/slab.h>
7 #include <asm/timex.h>
8 #include <linux/bitmap.h>
9 #include <linux/maple_tree.h>
10
/*
 * Declare a read-only (0444) module parameter plus its description in
 * one line; used for every tunable of this test module.
 */
#define __param(type, name, init, msg)		\
	static type name = init;		\
	module_param(name, type, 0444);		\
	MODULE_PARM_DESC(name, msg);

__param(int, nnodes, 100, "Number of nodes in the interval tree");
__param(int, perf_loops, 1000, "Number of iterations modifying the tree");

__param(int, nsearches, 100, "Number of searches to the interval tree");
__param(int, search_loops, 1000, "Number of iterations searching the tree");
__param(bool, search_all, false, "Searches will iterate all nodes in the tree");

__param(uint, max_endpoint, ~0, "Largest value for the interval's endpoint");
__param(ullong, seed, 3141592653589793238ULL, "Random seed");

/* The interval tree under test; shared by every check below. */
static struct rb_root_cached root = RB_ROOT_CACHED;
/* Backing array of nnodes intervals, allocated in module init. */
static struct interval_tree_node *nodes = NULL;
/* nsearches random query endpoints, allocated in module init. */
static u32 *queries = NULL;

/* PRNG state, seeded from the "seed" parameter in module init. */
static struct rnd_state rnd;
31
/*
 * Count the tree nodes whose interval intersects [start, last]
 * (both endpoints inclusive).
 */
static inline unsigned long
search(struct rb_root_cached *root, unsigned long start, unsigned long last)
{
	struct interval_tree_node *it;
	unsigned long hits = 0;

	it = interval_tree_iter_first(root, start, last);
	while (it) {
		hits++;
		it = interval_tree_iter_next(it, start, last);
	}
	return hits;
}
43
init(void)44 static void init(void)
45 {
46 int i;
47
48 for (i = 0; i < nnodes; i++) {
49 u32 b = (prandom_u32_state(&rnd) >> 4) % max_endpoint;
50 u32 a = (prandom_u32_state(&rnd) >> 4) % b;
51
52 nodes[i].start = a;
53 nodes[i].last = b;
54 }
55
56 /*
57 * Limit the search scope to what the user defined.
58 * Otherwise we are merely measuring empty walks,
59 * which is pointless.
60 */
61 for (i = 0; i < nsearches; i++)
62 queries[i] = (prandom_u32_state(&rnd) >> 4) % max_endpoint;
63 }
64
basic_check(void)65 static int basic_check(void)
66 {
67 int i, j;
68 cycles_t time1, time2, time;
69
70 printk(KERN_ALERT "interval tree insert/remove");
71
72 init();
73
74 time1 = get_cycles();
75
76 for (i = 0; i < perf_loops; i++) {
77 for (j = 0; j < nnodes; j++)
78 interval_tree_insert(nodes + j, &root);
79 for (j = 0; j < nnodes; j++)
80 interval_tree_remove(nodes + j, &root);
81 }
82
83 time2 = get_cycles();
84 time = time2 - time1;
85
86 time = div_u64(time, perf_loops);
87 printk(" -> %llu cycles\n", (unsigned long long)time);
88
89 return 0;
90 }
91
search_check(void)92 static int search_check(void)
93 {
94 int i, j;
95 unsigned long results;
96 cycles_t time1, time2, time;
97
98 printk(KERN_ALERT "interval tree search");
99
100 init();
101
102 for (j = 0; j < nnodes; j++)
103 interval_tree_insert(nodes + j, &root);
104
105 time1 = get_cycles();
106
107 results = 0;
108 for (i = 0; i < search_loops; i++)
109 for (j = 0; j < nsearches; j++) {
110 unsigned long start = search_all ? 0 : queries[j];
111 unsigned long last = search_all ? max_endpoint : queries[j];
112
113 results += search(&root, start, last);
114 }
115
116 time2 = get_cycles();
117 time = time2 - time1;
118
119 time = div_u64(time, search_loops);
120 results = div_u64(results, search_loops);
121 printk(" -> %llu cycles (%lu results)\n",
122 (unsigned long long)time, results);
123
124 for (j = 0; j < nnodes; j++)
125 interval_tree_remove(nodes + j, &root);
126
127 return 0;
128 }
129
intersection_range_check(void)130 static int intersection_range_check(void)
131 {
132 int i, j, k;
133 unsigned long start, last;
134 struct interval_tree_node *node;
135 unsigned long *intxn1;
136 unsigned long *intxn2;
137
138 printk(KERN_ALERT "interval tree iteration\n");
139
140 intxn1 = bitmap_alloc(nnodes, GFP_KERNEL);
141 if (!intxn1) {
142 WARN_ON_ONCE("Failed to allocate intxn1\n");
143 return -ENOMEM;
144 }
145
146 intxn2 = bitmap_alloc(nnodes, GFP_KERNEL);
147 if (!intxn2) {
148 WARN_ON_ONCE("Failed to allocate intxn2\n");
149 bitmap_free(intxn1);
150 return -ENOMEM;
151 }
152
153 for (i = 0; i < search_loops; i++) {
154 /* Initialize interval tree for each round */
155 init();
156 for (j = 0; j < nnodes; j++)
157 interval_tree_insert(nodes + j, &root);
158
159 /* Let's try nsearches different ranges */
160 for (k = 0; k < nsearches; k++) {
161 /* Try whole range once */
162 if (!k) {
163 start = 0UL;
164 last = ULONG_MAX;
165 } else {
166 last = (prandom_u32_state(&rnd) >> 4) % max_endpoint;
167 start = (prandom_u32_state(&rnd) >> 4) % last;
168 }
169
170 /* Walk nodes to mark intersection nodes */
171 bitmap_zero(intxn1, nnodes);
172 for (j = 0; j < nnodes; j++) {
173 node = nodes + j;
174
175 if (start <= node->last && last >= node->start)
176 bitmap_set(intxn1, j, 1);
177 }
178
179 /* Iterate tree to clear intersection nodes */
180 bitmap_zero(intxn2, nnodes);
181 for (node = interval_tree_iter_first(&root, start, last); node;
182 node = interval_tree_iter_next(node, start, last))
183 bitmap_set(intxn2, node - nodes, 1);
184
185 WARN_ON_ONCE(!bitmap_equal(intxn1, intxn2, nnodes));
186 }
187
188 for (j = 0; j < nnodes; j++)
189 interval_tree_remove(nodes + j, &root);
190 }
191
192 bitmap_free(intxn1);
193 bitmap_free(intxn2);
194 return 0;
195 }
196
197 #ifdef CONFIG_INTERVAL_TREE_SPAN_ITER
/*
 * Helper function to get span of current position from maple tree point of
 * view.
 *
 * Starting at mas->index, coalesces consecutive maple-tree ranges of the
 * same kind (all holes or all occupied) into one span, clips it to
 * [state->first_index, state->last_index], and stores it into @state's
 * hole or used field pair along with state->is_hole.
 */
static void mas_cur_span(struct ma_state *mas, struct interval_tree_span_iter *state)
{
	unsigned long cur_start;
	unsigned long cur_last;
	int is_hole;

	/* nothing left to report once the maple state has overflowed */
	if (mas->status == ma_overflow)
		return;

	/* walk to current position; a NULL entry means we sit in a hole */
	state->is_hole = mas_walk(mas) ? 0 : 1;

	/* clip the span start to the iteration window */
	cur_start = mas->index < state->first_index ?
			state->first_index : mas->index;

	/*
	 * whether we have followers: keep extending cur_last while the
	 * following range is of the same kind (hole vs. used) as this one
	 */
	do {

		cur_last = mas->last > state->last_index ?
				state->last_index : mas->last;

		is_hole = mas_next_range(mas, state->last_index) ? 0 : 1;

	} while (mas->status != ma_overflow && is_hole == state->is_hole);

	/* publish the coalesced span in the field pair matching its kind */
	if (state->is_hole) {
		state->start_hole = cur_start;
		state->last_hole = cur_last;
	} else {
		state->start_used = cur_start;
		state->last_used = cur_last;
	}

	/* advance position for next round */
	if (mas->status != ma_overflow)
		mas_set(mas, cur_last + 1);
}
239
span_iteration_check(void)240 static int span_iteration_check(void)
241 {
242 int i, j, k;
243 unsigned long start, last;
244 struct interval_tree_span_iter span, mas_span;
245
246 DEFINE_MTREE(tree);
247
248 MA_STATE(mas, &tree, 0, 0);
249
250 printk(KERN_ALERT "interval tree span iteration\n");
251
252 for (i = 0; i < search_loops; i++) {
253 /* Initialize interval tree for each round */
254 init();
255 for (j = 0; j < nnodes; j++)
256 interval_tree_insert(nodes + j, &root);
257
258 /* Put all the range into maple tree */
259 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
260 mt_set_in_rcu(&tree);
261
262 for (j = 0; j < nnodes; j++)
263 WARN_ON_ONCE(mtree_store_range(&tree, nodes[j].start,
264 nodes[j].last, nodes + j, GFP_KERNEL));
265
266 /* Let's try nsearches different ranges */
267 for (k = 0; k < nsearches; k++) {
268 /* Try whole range once */
269 if (!k) {
270 start = 0UL;
271 last = ULONG_MAX;
272 } else {
273 last = (prandom_u32_state(&rnd) >> 4) % max_endpoint;
274 start = (prandom_u32_state(&rnd) >> 4) % last;
275 }
276
277 mas_span.first_index = start;
278 mas_span.last_index = last;
279 mas_span.is_hole = -1;
280 mas_set(&mas, start);
281
282 interval_tree_for_each_span(&span, &root, start, last) {
283 mas_cur_span(&mas, &mas_span);
284
285 WARN_ON_ONCE(span.is_hole != mas_span.is_hole);
286
287 if (span.is_hole) {
288 WARN_ON_ONCE(span.start_hole != mas_span.start_hole);
289 WARN_ON_ONCE(span.last_hole != mas_span.last_hole);
290 } else {
291 WARN_ON_ONCE(span.start_used != mas_span.start_used);
292 WARN_ON_ONCE(span.last_used != mas_span.last_used);
293 }
294 }
295
296 }
297
298 WARN_ON_ONCE(mas.status != ma_overflow);
299
300 /* Cleanup maple tree for each round */
301 mtree_destroy(&tree);
302 /* Cleanup interval tree for each round */
303 for (j = 0; j < nnodes; j++)
304 interval_tree_remove(nodes + j, &root);
305 }
306 return 0;
307 }
#else
/* Span iteration needs the maple tree; stub the check out otherwise. */
static inline int span_iteration_check(void) {return 0; }
#endif
311
interval_tree_test_init(void)312 static int interval_tree_test_init(void)
313 {
314 nodes = kmalloc_array(nnodes, sizeof(struct interval_tree_node),
315 GFP_KERNEL);
316 if (!nodes)
317 return -ENOMEM;
318
319 queries = kmalloc_array(nsearches, sizeof(int), GFP_KERNEL);
320 if (!queries) {
321 kfree(nodes);
322 return -ENOMEM;
323 }
324
325 prandom_seed_state(&rnd, seed);
326
327 basic_check();
328 search_check();
329 intersection_range_check();
330 span_iteration_check();
331
332 kfree(queries);
333 kfree(nodes);
334
335 return -EAGAIN; /* Fail will directly unload the module */
336 }
337
/*
 * Module exit hook. Normally not reached, since init deliberately
 * fails with -EAGAIN ("Fail will directly unload the module").
 */
static void interval_tree_test_exit(void)
{
	printk(KERN_ALERT "test exit\n");
}
342
/* Hook registration and module metadata. */
module_init(interval_tree_test_init)
module_exit(interval_tree_test_exit)

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michel Lespinasse");
MODULE_DESCRIPTION("Interval Tree test");
349