xref: /linux/lib/test_maple_tree.c (revision 280b792cac62ddadca2935766ca870b438c86323)
1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * test_maple_tree.c: Test the maple tree API
4  * Copyright (c) 2018-2022 Oracle Corporation
5  * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
6  *
7  * Any tests that only require the interface of the tree.
8  */
9 
10 #include <linux/maple_tree.h>
11 #include <linux/module.h>
12 #include <linux/rwsem.h>
13 
/* Top of the range used by the allocation tests; everything above is pre-filled. */
#define MTREE_ALLOC_MAX 0x2000000000000Ul
#define CONFIG_MAPLE_SEARCH
/* Node slot count depends on word size; more than 31 slots means a 32-bit build. */
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)

/*
 * Without CONFIG_DEBUG_MAPLE_TREE the tree code does not provide the debug
 * helpers, so stub them out and replace MT_BUG_ON() with a non-fatal variant
 * that counts runs/passes and logs failures instead of triggering BUG().
 */
#ifndef CONFIG_DEBUG_MAPLE_TREE
#define mt_dump(mt, fmt)		do {} while (0)
#define mt_validate(mt)			do {} while (0)
#define mt_cache_shrink()		do {} while (0)
#define mas_dump(mas)			do {} while (0)
#define mas_wr_dump(mas)		do {} while (0)
atomic_t maple_tree_tests_run;		/* Total assertions evaluated. */
atomic_t maple_tree_tests_passed;	/* Assertions that did not trip. */
#undef MT_BUG_ON

/* Evaluate __x once; report (but do not abort on) a true condition. */
#define MT_BUG_ON(__tree, __x) do {					\
	atomic_inc(&maple_tree_tests_run);				\
	if (__x) {							\
		pr_info("BUG at %s:%d (%u)\n",				\
		__func__, __LINE__, __x);				\
		pr_info("Pass: %u Run:%u\n",				\
			atomic_read(&maple_tree_tests_passed),		\
			atomic_read(&maple_tree_tests_run));		\
	} else {							\
		atomic_inc(&maple_tree_tests_passed);			\
	}								\
} while (0)
#endif
41 
42 /* #define BENCH_SLOT_STORE */
43 /* #define BENCH_NODE_STORE */
44 /* #define BENCH_AWALK */
45 /* #define BENCH_WALK */
46 /* #define BENCH_LOAD */
47 /* #define BENCH_MT_FOR_EACH */
48 /* #define BENCH_FORK */
49 /* #define BENCH_MAS_FOR_EACH */
50 /* #define BENCH_MAS_PREV */
51 
/*
 * In-kernel builds have no userspace test-allocator hooks, so stub them;
 * userspace builds lack cond_resched(), so stub that instead.
 */
#ifdef __KERNEL__
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
#define cond_resched()			do {} while (0)
#endif

/* Shorthand predicates over the maple state's status field. */
#define mas_is_none(x)		((x)->status == ma_none)
#define mas_is_overflow(x)	((x)->status == ma_overflow)
#define mas_is_underflow(x)	((x)->status == ma_underflow)
62 
/*
 * Insert xa_mk_value(index) at @index.  Returns 0 on success or the errno
 * from mtree_insert().  The mask keeps the value within xa_mk_value() range.
 */
static int __init mtree_insert_index(struct maple_tree *mt,
				     unsigned long index, gfp_t gfp)
{
	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
}
68 
69 static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
70 {
71 	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
72 	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
73 }
74 
/* GFP_KERNEL convenience wrapper around mtree_insert(). */
static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
				void *ptr)
{
	return mtree_insert(mt, index, ptr, GFP_KERNEL);
}
80 
/* GFP_KERNEL convenience wrapper around mtree_store_range(). */
static int __init mtree_test_store_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
}
86 
/* Store @ptr at the single index @start (a start..start range store). */
static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
				void *ptr)
{
	return mtree_test_store_range(mt, start, start, ptr);
}
92 
/* GFP_KERNEL convenience wrapper around mtree_insert_range(). */
static int __init mtree_test_insert_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
}
98 
/* Thin wrapper so all test loads funnel through one helper. */
static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
{
	return mtree_load(mt, index);
}
103 
/* Thin wrapper so all test erases funnel through one helper. */
static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{
	return mtree_erase(mt, index);
}
108 
109 #if defined(CONFIG_64BIT)
110 static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
111 		unsigned long start, unsigned long end, unsigned long size,
112 		unsigned long expected, int eret, void *ptr)
113 {
114 
115 	unsigned long result = expected + 1;
116 	int ret;
117 
118 	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
119 			GFP_KERNEL);
120 	MT_BUG_ON(mt, ret != eret);
121 	if (ret)
122 		return;
123 
124 	MT_BUG_ON(mt, result != expected);
125 }
126 
127 static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
128 		unsigned long start, unsigned long end, unsigned long size,
129 		unsigned long expected, int eret, void *ptr)
130 {
131 
132 	unsigned long result = expected + 1;
133 	int ret;
134 
135 	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
136 			GFP_KERNEL);
137 	MT_BUG_ON(mt, ret != eret);
138 	if (ret)
139 		return;
140 
141 	MT_BUG_ON(mt, result != expected);
142 }
143 #endif
144 
145 static noinline void __init check_load(struct maple_tree *mt,
146 				       unsigned long index, void *ptr)
147 {
148 	void *ret = mtree_test_load(mt, index);
149 
150 	if (ret != ptr)
151 		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
152 	MT_BUG_ON(mt, ret != ptr);
153 }
154 
155 static noinline void __init check_store_range(struct maple_tree *mt,
156 		unsigned long start, unsigned long end, void *ptr, int expected)
157 {
158 	int ret = -EINVAL;
159 	unsigned long i;
160 
161 	ret = mtree_test_store_range(mt, start, end, ptr);
162 	MT_BUG_ON(mt, ret != expected);
163 
164 	if (ret)
165 		return;
166 
167 	for (i = start; i <= end; i++)
168 		check_load(mt, i, ptr);
169 }
170 
171 static noinline void __init check_insert_range(struct maple_tree *mt,
172 		unsigned long start, unsigned long end, void *ptr, int expected)
173 {
174 	int ret = -EINVAL;
175 	unsigned long i;
176 
177 	ret = mtree_test_insert_range(mt, start, end, ptr);
178 	MT_BUG_ON(mt, ret != expected);
179 
180 	if (ret)
181 		return;
182 
183 	for (i = start; i <= end; i++)
184 		check_load(mt, i, ptr);
185 }
186 
187 static noinline void __init check_insert(struct maple_tree *mt,
188 					 unsigned long index, void *ptr)
189 {
190 	int ret = -EINVAL;
191 
192 	ret = mtree_test_insert(mt, index, ptr);
193 	MT_BUG_ON(mt, ret != 0);
194 }
195 
196 static noinline void __init check_dup_insert(struct maple_tree *mt,
197 				      unsigned long index, void *ptr)
198 {
199 	int ret = -EINVAL;
200 
201 	ret = mtree_test_insert(mt, index, ptr);
202 	MT_BUG_ON(mt, ret != -EEXIST);
203 }
204 
205 
206 static noinline void __init check_index_load(struct maple_tree *mt,
207 					     unsigned long index)
208 {
209 	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
210 }
211 
212 static inline __init int not_empty(struct maple_node *node)
213 {
214 	int i;
215 
216 	if (node->parent)
217 		return 1;
218 
219 	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
220 		if (node->slot[i])
221 			return 1;
222 
223 	return 0;
224 }
225 
226 
/*
 * Insert indices max..1 in descending order.  After each insert, verify all
 * already-present indices load back correctly, the next lower index is still
 * absent, and the tree reports a height both with and without RCU mode.
 */
static noinline void __init check_rev_seq(struct maple_tree *mt,
					  unsigned long max, bool verbose)
{
	unsigned long i = max, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	while (i) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = i; j <= max; j++)
			check_index_load(mt, j);

		/* i - 1 has not been inserted yet. */
		check_load(mt, i - 1, NULL);
		mt_set_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		mt_clear_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		i--;
	}
	/* One past the highest inserted index must be empty. */
	check_load(mt, max + 1, NULL);

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
			__func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}
259 
/*
 * Insert indices 0..max in ascending order.  After each insert, verify all
 * indices so far load back correctly and the next index is still absent.
 */
static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	for (i = 0; i <= max; i++) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = 0; j <= i; j++)
			check_index_load(mt, j);

		/* A lone root entry reports height 0, so only check when i > 0. */
		if (i)
			MT_BUG_ON(mt, !mt_height(mt));
		check_load(mt, i + 1, NULL);
	}

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}
288 
289 static noinline void __init check_lb_not_empty(struct maple_tree *mt)
290 {
291 	unsigned long i, j;
292 	unsigned long huge = 4000UL * 1000 * 1000;
293 
294 
295 	i = huge;
296 	while (i > 4096) {
297 		check_insert(mt, i, (void *) i);
298 		for (j = huge; j >= i; j /= 2) {
299 			check_load(mt, j-1, NULL);
300 			check_load(mt, j, (void *) j);
301 			check_load(mt, j+1, NULL);
302 		}
303 		i /= 2;
304 	}
305 	mtree_destroy(mt);
306 }
307 
/* Run the lower-bound split checks starting from a guaranteed-empty tree. */
static noinline void __init check_lower_bound_split(struct maple_tree *mt)
{
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_lb_not_empty(mt);
}
313 
314 static noinline void __init check_upper_bound_split(struct maple_tree *mt)
315 {
316 	unsigned long i, j;
317 	unsigned long huge;
318 
319 	MT_BUG_ON(mt, !mtree_empty(mt));
320 
321 	if (MAPLE_32BIT)
322 		huge = 2147483647UL;
323 	else
324 		huge = 4000UL * 1000 * 1000;
325 
326 	i = 4096;
327 	while (i < huge) {
328 		check_insert(mt, i, (void *) i);
329 		for (j = i; j >= huge; j *= 2) {
330 			check_load(mt, j-1, NULL);
331 			check_load(mt, j, (void *) j);
332 			check_load(mt, j+1, NULL);
333 		}
334 		i *= 2;
335 	}
336 	mtree_destroy(mt);
337 }
338 
/*
 * Seed one entry far up the range and one at zero, then run the lower-bound
 * checks so splits happen around the middle of the populated span.
 * NOTE(review): 8000UL * 1000 * 1000 exceeds a 32-bit unsigned long and
 * would wrap there - presumably this is only run on 64-bit; confirm at the
 * call site.
 */
static noinline void __init check_mid_split(struct maple_tree *mt)
{
	unsigned long huge = 8000UL * 1000 * 1000;

	check_insert(mt, huge, (void *) huge);
	check_insert(mt, 0, xa_mk_value(0));
	check_lb_not_empty(mt);
}
347 
/*
 * Exercise mas_find_rev(): store entries i at [i*10, i*10 + 5] for
 * i = 0..200, then walk backwards from several starting points, including a
 * minimum that excludes any entry and repeated calls continuing the walk.
 */
static noinline void __init check_rev_find(struct maple_tree *mt)
{
	int i, nr_entries = 200;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	rcu_read_lock();
	/* From 1000 with min 1000: lands exactly on entry 100, then nothing. */
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	/* 997-999 falls entirely in the gap between entries 99 and 100. */
	mas_set(&mas, 999);
	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	/* Continuing a reverse walk steps to the next lower entry. */
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	/* Walk all the way down to, and including, index 0. */
	mas_set(&mas, 20);
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}
386 
/*
 * Populate the tree with entries at 0 and at powers of four (1, 4, 16, ...),
 * with a XA_ZERO_ENTRY at 64, then verify all the iteration interfaces -
 * mas_find(), mas_for_each() (with and without mas_pause()), mt_for_each(),
 * mt_find(), mas_prev() - visit exactly the expected entries in order.
 */
static noinline void __init check_find(struct maple_tree *mt)
{
	unsigned long val = 0;
	unsigned long count;
	unsigned long max;
	unsigned long top;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	/* Insert 0. */
	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;
#else
	top = ULONG_MAX;
#endif

	if (MAPLE_32BIT) {
		count = 15;
	} else {
		count = 20;
	}

	/* Insert powers of four; index 64 gets XA_ZERO_ENTRY instead. */
	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
				XA_ZERO_ENTRY, GFP_KERNEL));

		val <<= 2;
	}

	/* Bounded forward search with mas_find(). */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Same walk via the mas_for_each() iterator. */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Test mas_pause */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;

		/* Drop and retake the lock mid-walk; pause keeps it valid. */
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	/* mt_for_each() skips the XA_ZERO_ENTRY at 64. */
	val = 0;
	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		val <<= 2;
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}

	/* Full-range mt_for_each() including a ULONG_MAX entry; count visits. */
	val = 0;
	max = 0;
	index = 0;
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
		max++;
		MT_BUG_ON(mt, max > 25);
	}
	mtree_erase_index(mt, ULONG_MAX);

	/* mt_find() from 17: first entry at or after is 256 (within 512). */
	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	/* With max 20 there is nothing between 17 and 20. */
	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);


	/* Range check.. */
	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	/* mas_for_each() with mas_pause() sees the zero entry and the top. */
	val = 0;
	mas_set(&mas, 0);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
		if (!val)
			val = 1;
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	/* A single-index search at 1048576 must find the entry there. */
	mas_set(&mas, 1048576);
	mas_lock(&mas);
	entry = mas_find(&mas, 1048576);
	mas_unlock(&mas);
	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * Find last value.
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete end entry
	 * 3. find the last value but searching for ULONG_MAX and then using
	 * prev
	 */
	/* First, get the expected result. */
	mas_lock(&mas);
	mas_reset(&mas);
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas_reset(&mas);
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	mas_erase(&mas);

	/* Get the previous value from MAS_START */
	mas_reset(&mas);
	entry2 = mas_prev(&mas, 0);

	/* Check results. */
	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);

	/* mas_prev() from a ma_none state must also find the last entry. */
	mas.status = ma_none;
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	/* Nothing precedes index 0. */
	mas_set(&mas, 0);
	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);

	mas_unlock(&mas);
	mtree_destroy(mt);
}
603 
/*
 * Verify mas_for_each() sees exactly the live entries: first on an empty
 * tree (no iterations), then while the tree grows to 256 entries, then while
 * it shrinks again one erase at a time.
 */
static noinline void __init check_find_2(struct maple_tree *mt)
{
	unsigned long i, j;
	void *entry;

	MA_STATE(mas, mt, 0, 0);
	rcu_read_lock();
	/* An empty tree must produce no iterations at all. */
	mas_for_each(&mas, entry, ULONG_MAX)
		MT_BUG_ON(mt, true);
	rcu_read_unlock();

	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);
		j = 0;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		/* Exactly i + 1 entries should have been visited. */
		MT_BUG_ON(mt, j != i + 1);
	}

	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);
		j = i + 1;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		/* The remaining entries are i + 1 .. 255. */
		MT_BUG_ON(mt, j != 256);
	}

	/*MT_BUG_ON(mt, !mtree_empty(mt)); */
}
646 
647 
648 #if defined(CONFIG_64BIT)
/*
 * Check top-down allocation: build a fixed VMA-like layout with known holes,
 * then verify mas_empty_area_rev() locates each hole and
 * mtree_alloc_rrange() satisfies (or correctly rejects) each request.
 */
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	static const unsigned long holes[] = {
		/*
		 * Note: start of hole is INCLUSIVE
		 *        end of hole is EXCLUSIVE
		 *        (opposite of the above table.)
		 * Start of hole, end of hole,  size of hole (+1)
		 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x7fff5878d << 12,  /* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x565234AF0 << 12,    /* Max */
		0x3000,         /* Size */
		0x565234AEE << 12,  /* max - 3. */
		0,              /* Return value success. */

		0x0,            /* Min */
		-1,             /* Max */
		0x1000,         /* Size */
		562949953421311 << 12,/* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7F36D5109 << 12,    /* Max */
		0x4000,         /* Size */
		0x7F36D5106 << 12,    /* First rev hole of size 0x4000 */
		0,              /* Return value success. */

		/* Ascend test. */
		0x0,
		34148798628 << 12,
		19 << 12,
		34148797418 << 12,
		0x0,

		/* Too big test. */
		0x0,
		18446744073709551615UL,
		562915594369134UL << 12,
		0x0,
		-EBUSY,

		/* Single space test. */
		34148798725 << 12,
		34148798725 << 12,
		1 << 12,
		34148798725 << 12,
		0,
	};

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	/* Pre-fill everything above MTREE_ALLOC_MAX so allocation stays below. */
	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */

#if DEBUG_REV_RANGE
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
				(range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}


	/* Each known hole must be found exactly by the reverse area search. */
	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_REV_RANGE
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
				min, holes[i+1]>>12, holes[i+2]>>12,
				holes[i] >> 12);
#endif
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
#if DEBUG_REV_RANGE
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
				(holes[i+1] >> 12));
#endif
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
		mas_reset(&mas);
	}

	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_REV_RANGE
		pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
				i, req_range[i] >> 12,
				(req_range[i + 1] >> 12),
				req_range[i+2] >> 12,
				req_range[i+3] >> 12);
#endif
		check_mtree_alloc_rrange(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	mtree_erase(mt, 34148798725);
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
			34148798725, 0, mt);

	mtree_destroy(mt);
}
822 
/*
 * Check bottom-up allocation: build the same fixed VMA-like layout, then
 * verify mas_empty_area() locates each hole and mtree_alloc_range()
 * satisfies (or correctly rejects) each request.
 */
static noinline void __init check_alloc_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps|awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};
	static const unsigned long holes[] = {
		/* Start of hole, end of hole,  size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x565234afb000, /* First hole in our data of size 1000. */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7fff58791000, /* Max */
		0x1F00,         /* Size */
		0x0,            /* First hole in our data of size 2000. */
		0,              /* Return value success. */

		/* Test ascend. */
		34148797436 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x3000,         /* Size */
		34148798629 << 12,             /* Expected location */
		0,              /* Return value success. */

		/* Test failing. */
		34148798623 << 12,  /* Min */
		34148798683 << 12,  /* Max */
		0x15000,            /* Size */
		0,             /* Expected location */
		-EBUSY,              /* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12,  /* Min */
		0x7fff587AF000,    /* Max */
		0x10000,           /* Size */
		34148798632 << 12,             /* Expected location */
		0,              /* Return value success. */

		/* Test walking off the end of root. */
		0,                  /* Min */
		-1,                 /* Max */
		-1,                 /* Size */
		0,                  /* Expected location */
		-EBUSY,             /* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		0,                  /* Min */
		-1,                 /* Max */
		4503599618982063UL << 12,  /* Size */
		34359052178 << 12,  /* Expected location */
		-EBUSY,             /* Return failure. */

		/* Test a single entry */
		34148798648 << 12,		/* Min */
		34148798648 << 12,		/* Max */
		4096,			/* Size of 1 */
		34148798648 << 12,	/* Location is the same as min/max */
		0,			/* Success */
	};
	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;
	MA_STATE(mas, mt, 0, 0);

	/* Pre-fill everything above MTREE_ALLOC_MAX so allocation stays below. */
	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
		mt_dump(mt, mt_dump_hex);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}


	/* Each known hole must be found exactly by the forward area search. */

	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {

#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
			holes[i+1] >> 12, holes[i+2] >> 12,
			min, holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
		mas_reset(&mas);
	}
	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
			 i/5, req_range[i]   >> 12, req_range[i + 1]   >> 12,
			 req_range[i + 2]   >> 12, req_range[i + 3]   >> 12,
			 req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
#if DEBUG_ALLOC_RANGE
		mt_dump(mt, mt_dump_hex);
#endif
	}

	mtree_destroy(mt);
}
987 #endif
988 
989 static noinline void __init check_ranges(struct maple_tree *mt)
990 {
991 	int i, val, val2;
992 	static const unsigned long r[] = {
993 		10, 15,
994 		20, 25,
995 		17, 22, /* Overlaps previous range. */
996 		9, 1000, /* Huge. */
997 		100, 200,
998 		45, 168,
999 		118, 128,
1000 			};
1001 
1002 	MT_BUG_ON(mt, !mtree_empty(mt));
1003 	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
1004 	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
1005 	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
1006 	MT_BUG_ON(mt, !mt_height(mt));
1007 	/* Store */
1008 	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
1009 	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
1010 	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
1011 	MT_BUG_ON(mt, !mt_height(mt));
1012 	mtree_destroy(mt);
1013 	MT_BUG_ON(mt, mt_height(mt));
1014 
1015 	check_seq(mt, 50, false);
1016 	mt_set_non_kernel(4);
1017 	check_store_range(mt, 5, 47,  xa_mk_value(47), 0);
1018 	MT_BUG_ON(mt, !mt_height(mt));
1019 	mtree_destroy(mt);
1020 
1021 	/* Create tree of 1-100 */
1022 	check_seq(mt, 100, false);
1023 	/* Store 45-168 */
1024 	mt_set_non_kernel(10);
1025 	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1026 	MT_BUG_ON(mt, !mt_height(mt));
1027 	mt_validate(mt);
1028 	mtree_destroy(mt);
1029 
1030 	/* Create tree of 1-200 */
1031 	check_seq(mt, 200, false);
1032 	/* Store 45-168 */
1033 	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1034 	MT_BUG_ON(mt, !mt_height(mt));
1035 	mt_validate(mt);
1036 	mtree_destroy(mt);
1037 
1038 	check_seq(mt, 30, false);
1039 	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
1040 	MT_BUG_ON(mt, !mt_height(mt));
1041 	mt_validate(mt);
1042 	mtree_destroy(mt);
1043 
1044 	/* Overwrite across multiple levels. */
1045 	/* Create tree of 1-400 */
1046 	check_seq(mt, 400, false);
1047 	mt_set_non_kernel(50);
1048 	/* Store 118-128 */
1049 	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
1050 	mt_set_non_kernel(50);
1051 	mtree_test_erase(mt, 140);
1052 	mtree_test_erase(mt, 141);
1053 	mtree_test_erase(mt, 142);
1054 	mtree_test_erase(mt, 143);
1055 	mtree_test_erase(mt, 130);
1056 	mtree_test_erase(mt, 131);
1057 	mtree_test_erase(mt, 132);
1058 	mtree_test_erase(mt, 133);
1059 	mtree_test_erase(mt, 134);
1060 	mtree_test_erase(mt, 135);
1061 	check_load(mt, r[12], xa_mk_value(r[12]));
1062 	check_load(mt, r[13], xa_mk_value(r[12]));
1063 	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
1064 	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
1065 	check_load(mt, 135, NULL);
1066 	check_load(mt, 140, NULL);
1067 	mt_validate(mt);
1068 	mt_set_non_kernel(0);
1069 	MT_BUG_ON(mt, !mt_height(mt));
1070 	mtree_destroy(mt);
1071 
1072 
1073 
1074 	/* Overwrite multiple levels at the end of the tree (slot 7) */
1075 	mt_set_non_kernel(50);
1076 	check_seq(mt, 400, false);
1077 	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1078 	check_store_range(mt, 347, 352, xa_mk_value(347), 0);
1079 
1080 	check_load(mt, 346, xa_mk_value(346));
1081 	for (i = 347; i <= 352; i++)
1082 		check_load(mt, i, xa_mk_value(347));
1083 	for (i = 353; i <= 361; i++)
1084 		check_load(mt, i, xa_mk_value(353));
1085 	check_load(mt, 362, xa_mk_value(362));
1086 	mt_set_non_kernel(0);
1087 	MT_BUG_ON(mt, !mt_height(mt));
1088 	mtree_destroy(mt);
1089 
1090 	mt_set_non_kernel(50);
1091 	check_seq(mt, 400, false);
1092 	check_store_range(mt, 352, 364, NULL, 0);
1093 	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
1094 	check_load(mt, 350, xa_mk_value(350));
1095 	check_load(mt, 351, xa_mk_value(352));
1096 	for (i = 352; i <= 363; i++)
1097 		check_load(mt, i, xa_mk_value(352));
1098 	check_load(mt, 364, NULL);
1099 	check_load(mt, 365, xa_mk_value(365));
1100 	mt_set_non_kernel(0);
1101 	MT_BUG_ON(mt, !mt_height(mt));
1102 	mtree_destroy(mt);
1103 
1104 	mt_set_non_kernel(5);
1105 	check_seq(mt, 400, false);
1106 	check_store_range(mt, 352, 364, NULL, 0);
1107 	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
1108 	check_load(mt, 350, xa_mk_value(350));
1109 	check_load(mt, 351, xa_mk_value(352));
1110 	for (i = 352; i <= 364; i++)
1111 		check_load(mt, i, xa_mk_value(352));
1112 	check_load(mt, 365, xa_mk_value(365));
1113 	mt_set_non_kernel(0);
1114 	MT_BUG_ON(mt, !mt_height(mt));
1115 	mtree_destroy(mt);
1116 
1117 
1118 	mt_set_non_kernel(50);
1119 	check_seq(mt, 400, false);
1120 	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
1121 	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1122 	mt_set_non_kernel(0);
1123 	mt_validate(mt);
1124 	MT_BUG_ON(mt, !mt_height(mt));
1125 	mtree_destroy(mt);
1126 	/*
1127 	 * Interesting cases:
1128 	 * 1. Overwrite the end of a node and end in the first entry of the next
1129 	 * node.
1130 	 * 2. Split a single range
1131 	 * 3. Overwrite the start of a range
1132 	 * 4. Overwrite the end of a range
1133 	 * 5. Overwrite the entire range
1134 	 * 6. Overwrite a range that causes multiple parent nodes to be
1135 	 * combined
1136 	 * 7. Overwrite a range that causes multiple parent nodes and part of
1137 	 * root to be combined
1138 	 * 8. Overwrite the whole tree
1139 	 * 9. Try to overwrite the zero entry of an alloc tree.
1140 	 * 10. Write a range larger than a nodes current pivot
1141 	 */
1142 
1143 	mt_set_non_kernel(50);
1144 	for (i = 0; i <= 500; i++) {
1145 		val = i*5;
1146 		val2 = (i+1)*5;
1147 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1148 	}
1149 	check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
1150 	check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
1151 	check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
1152 	check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
1153 	check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
1154 	mtree_destroy(mt);
1155 	mt_set_non_kernel(0);
1156 
1157 	mt_set_non_kernel(50);
1158 	for (i = 0; i <= 500; i++) {
1159 		val = i*5;
1160 		val2 = (i+1)*5;
1161 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1162 	}
1163 	check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
1164 	check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
1165 	check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
1166 	check_store_range(mt, 2460, 2470, NULL, 0);
1167 	check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
1168 	check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
1169 	mt_set_non_kernel(0);
1170 	MT_BUG_ON(mt, !mt_height(mt));
1171 	mtree_destroy(mt);
1172 
1173 	/* Check in-place modifications */
1174 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1175 	/* Append to the start of last range */
1176 	mt_set_non_kernel(50);
1177 	for (i = 0; i <= 500; i++) {
1178 		val = i * 5 + 1;
1179 		val2 = val + 4;
1180 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1181 	}
1182 
1183 	/* Append to the last range without touching any boundaries */
1184 	for (i = 0; i < 10; i++) {
1185 		val = val2 + 5;
1186 		val2 = val + 4;
1187 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1188 	}
1189 
1190 	/* Append to the end of last range */
1191 	val = val2;
1192 	for (i = 0; i < 10; i++) {
1193 		val += 5;
1194 		MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
1195 						     xa_mk_value(val)) != 0);
1196 	}
1197 
1198 	/* Overwriting the range and over a part of the next range */
1199 	for (i = 10; i < 30; i += 2) {
1200 		val = i * 5 + 1;
1201 		val2 = val + 5;
1202 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1203 	}
1204 
1205 	/* Overwriting a part of the range and over the next range */
1206 	for (i = 50; i < 70; i += 2) {
1207 		val2 = i * 5;
1208 		val = val2 - 5;
1209 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1210 	}
1211 
1212 	/*
1213 	 * Expand the range, only partially overwriting the previous and
1214 	 * next ranges
1215 	 */
1216 	for (i = 100; i < 130; i += 3) {
1217 		val = i * 5 - 5;
1218 		val2 = i * 5 + 1;
1219 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1220 	}
1221 
1222 	/*
1223 	 * Expand the range, only partially overwriting the previous and
1224 	 * next ranges, in RCU mode
1225 	 */
1226 	mt_set_in_rcu(mt);
1227 	for (i = 150; i < 180; i += 3) {
1228 		val = i * 5 - 5;
1229 		val2 = i * 5 + 1;
1230 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1231 	}
1232 
1233 	MT_BUG_ON(mt, !mt_height(mt));
1234 	mt_validate(mt);
1235 	mt_set_non_kernel(0);
1236 	mtree_destroy(mt);
1237 
1238 	/* Test rebalance gaps */
1239 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1240 	mt_set_non_kernel(50);
1241 	for (i = 0; i <= 50; i++) {
1242 		val = i*10;
1243 		val2 = (i+1)*10;
1244 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1245 	}
1246 	check_store_range(mt, 161, 161, xa_mk_value(161), 0);
1247 	check_store_range(mt, 162, 162, xa_mk_value(162), 0);
1248 	check_store_range(mt, 163, 163, xa_mk_value(163), 0);
1249 	check_store_range(mt, 240, 249, NULL, 0);
1250 	mtree_erase(mt, 200);
1251 	mtree_erase(mt, 210);
1252 	mtree_erase(mt, 220);
1253 	mtree_erase(mt, 230);
1254 	mt_set_non_kernel(0);
1255 	MT_BUG_ON(mt, !mt_height(mt));
1256 	mtree_destroy(mt);
1257 
1258 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1259 	for (i = 0; i <= 500; i++) {
1260 		val = i*10;
1261 		val2 = (i+1)*10;
1262 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1263 	}
1264 	check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
1265 	mt_validate(mt);
1266 	MT_BUG_ON(mt, !mt_height(mt));
1267 	mtree_destroy(mt);
1268 
1269 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1270 	for (i = 0; i <= 500; i++) {
1271 		val = i*10;
1272 		val2 = (i+1)*10;
1273 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1274 	}
1275 	check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
1276 	check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
1277 	check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
1278 	check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
1279 	check_store_range(mt, 4842, 4849, NULL, 0);
1280 	mt_validate(mt);
1281 	MT_BUG_ON(mt, !mt_height(mt));
1282 	mtree_destroy(mt);
1283 
1284 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1285 	for (i = 0; i <= 1300; i++) {
1286 		val = i*10;
1287 		val2 = (i+1)*10;
1288 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1289 		MT_BUG_ON(mt, mt_height(mt) >= 4);
1290 	}
1291 	/*  Cause a 3 child split all the way up the tree. */
1292 	for (i = 5; i < 215; i += 10) {
1293 		check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
1294 		mt_validate(mt);
1295 	}
1296 	for (i = 5; i < 65; i += 10) {
1297 		check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);
1298 		mt_validate(mt);
1299 	}
1300 
1301 	MT_BUG_ON(mt, mt_height(mt) >= 4);
1302 	for (i = 5; i < 45; i += 10) {
1303 		check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
1304 		mt_validate(mt);
1305 	}
1306 	if (!MAPLE_32BIT)
1307 		MT_BUG_ON(mt, mt_height(mt) < 4);
1308 	mtree_destroy(mt);
1309 
1310 
1311 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1312 	for (i = 0; i <= 1200; i++) {
1313 		val = i*10;
1314 		val2 = (i+1)*10;
1315 		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1316 		MT_BUG_ON(mt, mt_height(mt) >= 4);
1317 		mt_validate(mt);
1318 	}
1319 	/* Fill parents and leaves before split. */
1320 	val = 7660;
1321 	for (i = 5; i < 490; i += 5) {
1322 		val += 5;
1323 		check_store_range(mt, val, val + 1, NULL, 0);
1324 		mt_validate(mt);
1325 		MT_BUG_ON(mt, mt_height(mt) >= 4);
1326 	}
1327 
1328 	val = 9460;
1329 	/* Fill parents and leaves before split. */
1330 	for (i = 1; i < 10; i++) {
1331 		val++;
1332 		check_store_range(mt, val, val + 1, xa_mk_value(val), 0);
1333 		mt_validate(mt);
1334 	}
1335 
1336 	val = 8000;
1337 	for (i = 1; i < 14; i++) {
1338 		val++;
1339 		check_store_range(mt, val, val + 1, xa_mk_value(val), 0);
1340 		mt_validate(mt);
1341 	}
1342 
1343 
1344 	check_store_range(mt, 8051, 8051, xa_mk_value(8081), 0);
1345 	check_store_range(mt, 8052, 8052, xa_mk_value(8082), 0);
1346 	check_store_range(mt, 8083, 8083, xa_mk_value(8083), 0);
1347 	check_store_range(mt, 8084, 8084, xa_mk_value(8084), 0);
1348 	check_store_range(mt, 8085, 8085, xa_mk_value(8085), 0);
1349 	/* triple split across multiple levels. */
1350 	check_store_range(mt, 8099, 8100, xa_mk_value(1), 0);
1351 
1352 	mt_validate(mt);
1353 	if (!MAPLE_32BIT)
1354 		MT_BUG_ON(mt, mt_height(mt) != 4);
1355 }
1356 
1357 static noinline void __init check_next_entry(struct maple_tree *mt)
1358 {
1359 	void *entry = NULL;
1360 	unsigned long limit = 30, i = 0;
1361 	MA_STATE(mas, mt, i, i);
1362 
1363 	MT_BUG_ON(mt, !mtree_empty(mt));
1364 
1365 	check_seq(mt, limit, false);
1366 	rcu_read_lock();
1367 
1368 	/* Check the first one and get ma_state in the correct state. */
1369 	MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
1370 	for ( ; i <= limit + 1; i++) {
1371 		entry = mas_next(&mas, limit);
1372 		if (i > limit)
1373 			MT_BUG_ON(mt, entry != NULL);
1374 		else
1375 			MT_BUG_ON(mt, xa_mk_value(i) != entry);
1376 	}
1377 	rcu_read_unlock();
1378 	mtree_destroy(mt);
1379 }
1380 
1381 static noinline void __init check_prev_entry(struct maple_tree *mt)
1382 {
1383 	unsigned long index = 16;
1384 	void *value;
1385 	int i;
1386 
1387 	MA_STATE(mas, mt, index, index);
1388 
1389 	MT_BUG_ON(mt, !mtree_empty(mt));
1390 	check_seq(mt, 30, false);
1391 
1392 	rcu_read_lock();
1393 	value = mas_find(&mas, ULONG_MAX);
1394 	MT_BUG_ON(mt, value != xa_mk_value(index));
1395 	value = mas_prev(&mas, 0);
1396 	MT_BUG_ON(mt, value != xa_mk_value(index - 1));
1397 	rcu_read_unlock();
1398 	mtree_destroy(mt);
1399 
1400 	/* Check limits on prev */
1401 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1402 	mas_lock(&mas);
1403 	for (i = 0; i <= index; i++) {
1404 		mas_set_range(&mas, i*10, i*10+5);
1405 		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
1406 	}
1407 
1408 	mas_set(&mas, 20);
1409 	value = mas_walk(&mas);
1410 	MT_BUG_ON(mt, value != xa_mk_value(2));
1411 
1412 	value = mas_prev(&mas, 19);
1413 	MT_BUG_ON(mt, value != NULL);
1414 
1415 	mas_set(&mas, 80);
1416 	value = mas_walk(&mas);
1417 	MT_BUG_ON(mt, value != xa_mk_value(8));
1418 
1419 	value = mas_prev(&mas, 76);
1420 	MT_BUG_ON(mt, value != NULL);
1421 
1422 	mas_unlock(&mas);
1423 }
1424 
/*
 * check_store_null() - Verify NULL stores behave correctly: a NULL store
 * covering all existing data collapses the tree back to empty, while a
 * NULL store strictly inside a single root entry must not empty it.
 */
static noinline void __init check_store_null(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, ULONG_MAX);

	/*
	 * Store NULL at range [0, ULONG_MAX] to an empty tree should result
	 * in an empty tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at any range to an empty tree should result in an empty
	 * tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set_range(&mas, 3, 10);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [0, ULONG_MAX] to a single entry tree should
	 * result in an empty tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	/* &mas is used only as an arbitrary non-NULL entry pointer. */
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 0, ULONG_MAX);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [0, n] to a single entry tree should
	 * result in an empty tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 0, 5);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [m, n] where m > 0 to a single entry tree
	 * should still be a single entry tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 2, 5);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, mtree_empty(mt));
//	MT_BUG_ON(mt, xa_is_node(mas_root(&mas)));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [0, ULONG_MAX] to a tree with node should
	 * result in an empty tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set_range(&mas, 1, 3);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
//	MT_BUG_ON(mt, !xa_is_node(mas_root(&mas)));
	mas_set_range(&mas, 0, ULONG_MAX);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);
}
1510 
/*
 * check_root_expand() - Test walks and stores on root-only trees,
 * including entries with low pointer bits set (which could otherwise be
 * confused with internal node / advanced entries).
 */
static noinline void __init check_root_expand(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *ptr;


	/* Walking an empty tree reports one NULL range [0, ULONG_MAX]. */
	mas_lock(&mas);
	mas_set(&mas, 3);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	/* Store an arbitrary pointer at index 1 and check its neighbours. */
	ptr = &check_prev_entry;
	mas_set(&mas, 1);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 0);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	mas_set(&mas, 1);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);

	mas_set(&mas, 2);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	mas_unlock(&mas);
	mtree_destroy(mt);


	/* Store at index 0 and check the ranges around it. */
	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = &check_prev_entry;
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 5);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	mas_set_range(&mas, 0, 100);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);
	MT_BUG_ON(mt, mas.last != 0);
	mas_unlock(&mas);
	mtree_destroy(mt);

	/* An entry with bit 0 set must round-trip through next/prev. */
	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 1UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));

	mas_unlock(&mas);

	mtree_destroy(mt);

	/* Likewise for an entry with bit 1 set. */
	mt_init_flags(mt, 0);
	mas_lock(&mas);
	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 2UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));


	mas_unlock(&mas);
}
1600 
1601 static noinline void __init check_deficient_node(struct maple_tree *mt)
1602 {
1603 	MA_STATE(mas, mt, 0, 0);
1604 	int count;
1605 
1606 	mas_lock(&mas);
1607 	for (count = 0; count < 10; count++) {
1608 		mas_set(&mas, count);
1609 		mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL);
1610 	}
1611 
1612 	for (count = 20; count < 39; count++) {
1613 		mas_set(&mas, count);
1614 		mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL);
1615 	}
1616 
1617 	for (count = 10; count < 12; count++) {
1618 		mas_set(&mas, count);
1619 		mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL);
1620 	}
1621 	mas_unlock(&mas);
1622 	mt_validate(mt);
1623 }
1624 
/*
 * check_gap_combining() - Erase neighbouring entries so that their gaps
 * must be combined, then verify mas_empty_area_rev() finds the combined
 * gap.  The seq* tables group indices to erase with the min/max/size
 * arguments for the subsequent gap searches; seq100 differs between
 * 32-bit and 64-bit node layouts.
 */
static noinline void __init check_gap_combining(struct maple_tree *mt)
{
	struct maple_enode *mn1, *mn2;
	void *entry;
	unsigned long singletons = 100;
	static const unsigned long *seq100;
	static const unsigned long seq100_64[] = {
		/* 0-5 */
		74, 75, 76,
		50, 100, 2,

		/* 6-12 */
		44, 45, 46, 43,
		20, 50, 3,

		/* 13-20*/
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq100_32[] = {
		/* 0-5 */
		61, 62, 63,
		50, 100, 2,

		/* 6-12 */
		31, 32, 33, 30,
		20, 50, 3,

		/* 13-20*/
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq2000[] = {
		1152, 1151,
		1100, 1200, 2,
	};
	static const unsigned long seq400[] = {
		286, 318,
		256, 260, 266, 270, 275, 280, 290, 398,
		286, 310,
	};

	unsigned long index;

	MA_STATE(mas, mt, 0, 0);

	if (MAPLE_32BIT)
		seq100 = seq100_32;
	else
		seq100 = seq100_64;

	index = seq100[0];
	mas_set(&mas, index);
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, singletons, false); /* create 100 singletons. */

	/* Erase two adjacent entries to open a gap of 2. */
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq100[2]);
	check_load(mt, seq100[2], NULL);
	mtree_test_erase(mt, seq100[1]);
	check_load(mt, seq100[1], NULL);

	rcu_read_lock();
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	mas_next(&mas, ULONG_MAX);
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the test. */

	/*
	 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
	 * seq100[4]. Search for the gap.
	 */
	mt_set_non_kernel(1);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
					     seq100[5]));
	MT_BUG_ON(mt, mas.index != index + 1);
	rcu_read_unlock();

	/* Erase three more entries to create a gap of 3. */
	mtree_test_erase(mt, seq100[6]);
	check_load(mt, seq100[6], NULL);
	mtree_test_erase(mt, seq100[7]);
	check_load(mt, seq100[7], NULL);
	mtree_test_erase(mt, seq100[8]);
	index = seq100[9];

	rcu_read_lock();
	mas.index = index;
	mas.last = index;
	mas_reset(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mas_next(&mas, ULONG_MAX); /* go to the next entry. */
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */

	/*
	 * At this point, there is a gap of 3 at seq100[6].  Find it by
	 * searching 20 - 50 for size 3.
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
					     seq100[12]));
	MT_BUG_ON(mt, mas.index != seq100[6]);
	rcu_read_unlock();

	/* Create a gap via NULL stores rather than erases. */
	mt_set_non_kernel(1);
	mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], xa_mk_value(seq100[14]));
	mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], NULL);

	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
					     seq100[17]));
	MT_BUG_ON(mt, mas.index != seq100[13]);
	mt_validate(mt);
	rcu_read_unlock();

	/*
	 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
	 * gap.
	 */
	mt_set_non_kernel(2);
	mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
	mtree_test_erase(mt, seq100[15]);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
					     seq100[20]));
	rcu_read_unlock();
	MT_BUG_ON(mt, mas.index != seq100[18]);
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 2000 tests are for multi-level tree gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 2000, false);
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq2000[0]);
	mtree_test_erase(mt, seq2000[1]);

	mt_set_non_kernel(2);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
					     seq2000[4]));
	MT_BUG_ON(mt, mas.index != seq2000[1]);
	rcu_read_unlock();
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 400 tests rebalancing over two levels. */
	mt_set_non_kernel(99);
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	/* Repeatedly grow a range from different start points. */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	mtree_test_store_range(mt, seq400[2], seq400[9],
			       xa_mk_value(seq400[2]));
	mtree_test_store_range(mt, seq400[3], seq400[9],
			       xa_mk_value(seq400[3]));
	mtree_test_store_range(mt, seq400[4], seq400[9],
			       xa_mk_value(seq400[4]));
	mtree_test_store_range(mt, seq400[5], seq400[9],
			       xa_mk_value(seq400[5]));
	mtree_test_store_range(mt, seq400[0], seq400[9],
			       xa_mk_value(seq400[0]));
	mtree_test_store_range(mt, seq400[6], seq400[9],
			       xa_mk_value(seq400[6]));
	mtree_test_store_range(mt, seq400[7], seq400[9],
			       xa_mk_value(seq400[7]));
	mtree_test_store_range(mt, seq400[8], seq400[9],
			       xa_mk_value(seq400[8]));
	mtree_test_store_range(mt, seq400[10], seq400[11],
			       xa_mk_value(seq400[10]));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);
}
1822 static noinline void __init check_node_overwrite(struct maple_tree *mt)
1823 {
1824 	int i, max = 4000;
1825 
1826 	for (i = 0; i < max; i++)
1827 		mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));
1828 
1829 	mtree_test_store_range(mt, 319951, 367950, NULL);
1830 	/*mt_dump(mt, mt_dump_dec); */
1831 	mt_validate(mt);
1832 }
1833 
1834 #if defined(BENCH_SLOT_STORE)
1835 static noinline void __init bench_slot_store(struct maple_tree *mt)
1836 {
1837 	int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;
1838 
1839 	for (i = 0; i < max; i += 10)
1840 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1841 
1842 	for (i = 0; i < count; i++) {
1843 		mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
1844 		mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
1845 				  GFP_KERNEL);
1846 	}
1847 }
1848 #endif
1849 
1850 #if defined(BENCH_NODE_STORE)
1851 static noinline void __init bench_node_store(struct maple_tree *mt)
1852 {
1853 	int i, overwrite = 76, max = 240, count = 20000000;
1854 
1855 	for (i = 0; i < max; i += 10)
1856 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1857 
1858 	for (i = 0; i < count; i++) {
1859 		mtree_store_range(mt, overwrite,  overwrite + 15,
1860 				  xa_mk_value(overwrite), GFP_KERNEL);
1861 
1862 		overwrite += 5;
1863 		if (overwrite >= 135)
1864 			overwrite = 76;
1865 	}
1866 }
1867 #endif
1868 
1869 #if defined(BENCH_AWALK)
1870 static noinline void __init bench_awalk(struct maple_tree *mt)
1871 {
1872 	int i, max = 2500, count = 50000000;
1873 	MA_STATE(mas, mt, 1470, 1470);
1874 
1875 	for (i = 0; i < max; i += 10)
1876 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1877 
1878 	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);
1879 
1880 	for (i = 0; i < count; i++) {
1881 		mas_empty_area_rev(&mas, 0, 2000, 10);
1882 		mas_reset(&mas);
1883 	}
1884 }
1885 #endif
1886 #if defined(BENCH_WALK)
1887 static noinline void __init bench_walk(struct maple_tree *mt)
1888 {
1889 	int i, max = 2500, count = 550000000;
1890 	MA_STATE(mas, mt, 1470, 1470);
1891 
1892 	for (i = 0; i < max; i += 10)
1893 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1894 
1895 	for (i = 0; i < count; i++) {
1896 		mas_walk(&mas);
1897 		mas_reset(&mas);
1898 	}
1899 
1900 }
1901 #endif
1902 
1903 #if defined(BENCH_LOAD)
1904 static noinline void __init bench_load(struct maple_tree *mt)
1905 {
1906 	int i, max = 2500, count = 550000000;
1907 
1908 	for (i = 0; i < max; i += 10)
1909 		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1910 
1911 	for (i = 0; i < count; i++)
1912 		mtree_load(mt, 1470);
1913 }
1914 #endif
1915 
1916 #if defined(BENCH_MT_FOR_EACH)
1917 static noinline void __init bench_mt_for_each(struct maple_tree *mt)
1918 {
1919 	int i, count = 1000000;
1920 	unsigned long max = 2500, index = 0;
1921 	void *entry;
1922 
1923 	for (i = 0; i < max; i += 5)
1924 		mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);
1925 
1926 	for (i = 0; i < count; i++) {
1927 		unsigned long j = 0;
1928 
1929 		mt_for_each(mt, entry, index, max) {
1930 			MT_BUG_ON(mt, entry != xa_mk_value(j));
1931 			j += 5;
1932 		}
1933 
1934 		index = 0;
1935 	}
1936 
1937 }
1938 #endif
1939 
1940 #if defined(BENCH_MAS_FOR_EACH)
1941 static noinline void __init bench_mas_for_each(struct maple_tree *mt)
1942 {
1943 	int i, count = 1000000;
1944 	unsigned long max = 2500;
1945 	void *entry;
1946 	MA_STATE(mas, mt, 0, 0);
1947 
1948 	for (i = 0; i < max; i += 5) {
1949 		int gap = 4;
1950 
1951 		if (i % 30 == 0)
1952 			gap = 3;
1953 		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1954 	}
1955 
1956 	rcu_read_lock();
1957 	for (i = 0; i < count; i++) {
1958 		unsigned long j = 0;
1959 
1960 		mas_for_each(&mas, entry, max) {
1961 			MT_BUG_ON(mt, entry != xa_mk_value(j));
1962 			j += 5;
1963 		}
1964 		mas_set(&mas, 0);
1965 	}
1966 	rcu_read_unlock();
1967 
1968 }
1969 #endif
1970 #if defined(BENCH_MAS_PREV)
1971 static noinline void __init bench_mas_prev(struct maple_tree *mt)
1972 {
1973 	int i, count = 1000000;
1974 	unsigned long max = 2500;
1975 	void *entry;
1976 	MA_STATE(mas, mt, 0, 0);
1977 
1978 	for (i = 0; i < max; i += 5) {
1979 		int gap = 4;
1980 
1981 		if (i % 30 == 0)
1982 			gap = 3;
1983 		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1984 	}
1985 
1986 	rcu_read_lock();
1987 	for (i = 0; i < count; i++) {
1988 		unsigned long j = 2495;
1989 
1990 		mas_set(&mas, ULONG_MAX);
1991 		while ((entry = mas_prev(&mas, 0)) != NULL) {
1992 			MT_BUG_ON(mt, entry != xa_mk_value(j));
1993 			j -= 5;
1994 		}
1995 	}
1996 	rcu_read_unlock();
1997 
1998 }
1999 #endif
2000 /* check_forking - simulate the kernel forking sequence with the tree. */
2001 static noinline void __init check_forking(void)
2002 {
2003 	struct maple_tree mt, newmt;
2004 	int i, nr_entries = 134, ret;
2005 	void *val;
2006 	MA_STATE(mas, &mt, 0, 0);
2007 	MA_STATE(newmas, &newmt, 0, 0);
2008 	struct rw_semaphore mt_lock, newmt_lock;
2009 
2010 	init_rwsem(&mt_lock);
2011 	init_rwsem(&newmt_lock);
2012 
2013 	mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2014 	mt_set_external_lock(&mt, &mt_lock);
2015 
2016 	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2017 	mt_set_external_lock(&newmt, &newmt_lock);
2018 
2019 	down_write(&mt_lock);
2020 	for (i = 0; i <= nr_entries; i++) {
2021 		mas_set_range(&mas, i*10, i*10 + 5);
2022 		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
2023 	}
2024 
2025 	down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
2026 	ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
2027 	if (ret) {
2028 		pr_err("OOM!");
2029 		BUG_ON(1);
2030 	}
2031 
2032 	mas_set(&newmas, 0);
2033 	mas_for_each(&newmas, val, ULONG_MAX)
2034 		mas_store(&newmas, val);
2035 
2036 	mas_destroy(&newmas);
2037 	mas_destroy(&mas);
2038 	mt_validate(&newmt);
2039 	__mt_destroy(&newmt);
2040 	__mt_destroy(&mt);
2041 	up_write(&newmt_lock);
2042 	up_write(&mt_lock);
2043 }
2044 
/*
 * check_iteration() - Ensure that modifying the current entry from
 * inside mas_for_each() (shrinking or extending it via mas.index /
 * mas.last before mas_store()) does not skip or repeat entries when
 * iteration resumes.
 */
static noinline void __init check_iteration(struct maple_tree *mt)
{
	int i, nr_entries = 125;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	/* Entry i covers [i * 10, i * 10 + 9]. */
	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i * 10, i * 10 + 9,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);

	i = 0;
	mas_lock(&mas);
	mas_for_each(&mas, val, 925) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 92 */
		if (i == 92) {
			mas.index = 925;
			mas.last = 929;
			mas_store(&mas, val);
		}
		i++;
	}
	/* Ensure mas_find() gets the next value */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 785) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite start of entry 78 */
		if (i == 78) {
			mas.index = 780;
			mas.last = 785;
			mas_store(&mas, val);
		} else {
			i++;
		}
	}
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 765) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 76 and advance to the end */
		if (i == 76) {
			mas.index = 760;
			mas.last = 765;
			mas_store(&mas, val);
		}
		i++;
	}
	/* Make sure the next find returns the one after 765, 766-769 */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(76));
	mas_unlock(&mas);
	mas_destroy(&mas);
	mt_set_non_kernel(0);
}
2111 
2112 static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
2113 {
2114 
2115 	struct maple_tree newmt;
2116 	int i, nr_entries = 135;
2117 	void *val;
2118 	MA_STATE(mas, mt, 0, 0);
2119 	MA_STATE(newmas, mt, 0, 0);
2120 
2121 	for (i = 0; i <= nr_entries; i++)
2122 		mtree_store_range(mt, i*10, i*10 + 5,
2123 				  xa_mk_value(i), GFP_KERNEL);
2124 
2125 	mt_set_non_kernel(99999);
2126 	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
2127 	newmas.tree = &newmt;
2128 	rcu_read_lock();
2129 	mas_lock(&newmas);
2130 	mas_reset(&newmas);
2131 	mas_set(&mas, 0);
2132 	mas_for_each(&mas, val, ULONG_MAX) {
2133 		newmas.index = mas.index;
2134 		newmas.last = mas.last;
2135 		mas_store_gfp(&newmas, val, GFP_KERNEL);
2136 	}
2137 	mas_unlock(&newmas);
2138 	rcu_read_unlock();
2139 	mt_validate(&newmt);
2140 	mt_set_non_kernel(0);
2141 	mtree_destroy(&newmt);
2142 }
2143 
#if defined(BENCH_FORK)
/*
 * bench_forking() - Benchmark duplicating a tree the way fork() duplicates an
 * mm's VMA tree: __mt_dup() followed by a pass that re-stores every entry.
 *
 * Both trees use MT_FLAGS_LOCK_EXTERN with rw_semaphores, mirroring mmap_lock
 * usage.  The duplicate's lock is taken with SINGLE_DEPTH_NESTING since it is
 * acquired while the source lock is already held.
 */
static noinline void __init bench_forking(void)
{
	struct maple_tree mt, newmt;
	int i, nr_entries = 134, nr_fork = 80000, ret;
	void *val;
	MA_STATE(mas, &mt, 0, 0);
	MA_STATE(newmas, &newmt, 0, 0);
	struct rw_semaphore mt_lock, newmt_lock;

	init_rwsem(&mt_lock);
	init_rwsem(&newmt_lock);

	mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
	mt_set_external_lock(&mt, &mt_lock);

	/* Populate the source tree under its external lock. */
	down_write(&mt_lock);
	for (i = 0; i <= nr_entries; i++) {
		mas_set_range(&mas, i*10, i*10 + 5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	/* Each iteration duplicates, touches every entry, and tears down. */
	for (i = 0; i < nr_fork; i++) {
		mt_init_flags(&newmt,
			      MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
		mt_set_external_lock(&newmt, &newmt_lock);

		/* Nested: the source lock is still held across the copy. */
		down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
		ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
		if (ret) {
			pr_err("OOM!");
			BUG_ON(1);
		}

		/* Re-store each entry, as fork does to replace VMA pointers. */
		mas_set(&newmas, 0);
		mas_for_each(&newmas, val, ULONG_MAX)
			mas_store(&newmas, val);

		mas_destroy(&newmas);
		mt_validate(&newmt);
		__mt_destroy(&newmt);
		up_write(&newmt_lock);
	}
	mas_destroy(&mas);
	__mt_destroy(&mt);
	up_write(&mt_lock);
}
#endif
2192 
/*
 * next_prev_test() - Exercise mas_next()/mas_prev()/mas_walk() stepping across
 * slots, node boundaries, and tree levels, including running off either end.
 *
 * The level2_* tables hold: { walk index, next limit, expected index of first
 * next, its last, expected index of second next, its last } — chosen per word
 * size so the second next crosses into a different node.
 */
static noinline void __init next_prev_test(struct maple_tree *mt)
{
	int i, nr_entries;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	struct maple_enode *mn;
	static const unsigned long *level2;
	static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
						   725};
	static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
						   1760, 1765};
	unsigned long last_index;

	/* Node capacity differs by word size; scale the test accordingly. */
	if (MAPLE_32BIT) {
		nr_entries = 500;
		level2 = level2_32;
		last_index = 0x138e;
	} else {
		nr_entries = 200;
		level2 = level2_64;
		last_index = 0x7d6;
	}

	/* Populate: [i*10, i*10 + 5] -> xa_mk_value(i). */
	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mas_lock(&mas);
	/* Step forward until the limit (1000) stops the iteration. */
	for (i = 0; i <= nr_entries / 2; i++) {
		mas_next(&mas, 1000);
		if (mas_is_none(&mas))
			break;

	}
	mas_reset(&mas);
	mas_set(&mas, 0);
	i = 0;
	/* Iterate without pausing. */
	mas_for_each(&mas, val, 1000) {
		i++;
	}

	mas_reset(&mas);
	mas_set(&mas, 0);
	i = 0;
	/* Iterate with a pause each step (forces re-walk from the root). */
	mas_for_each(&mas, val, 1000) {
		mas_pause(&mas);
		i++;
	}

	/*
	 * 680 - 685 = 0x61a00001930c
	 * 686 - 689 = NULL;
	 * 690 - 695 = 0x61a00001930c
	 * Check simple next/prev
	 */
	mas_set(&mas, 686);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != NULL);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
	MT_BUG_ON(mt, mas.index != 690);
	MT_BUG_ON(mt, mas.last != 695);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
	MT_BUG_ON(mt, mas.index != 680);
	MT_BUG_ON(mt, mas.last != 685);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
	MT_BUG_ON(mt, mas.index != 690);
	MT_BUG_ON(mt, mas.last != 695);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
	MT_BUG_ON(mt, mas.index != 700);
	MT_BUG_ON(mt, mas.last != 705);

	/* Check across node boundaries of the tree */
	mas_set(&mas, 70);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
	MT_BUG_ON(mt, mas.index != 70);
	MT_BUG_ON(mt, mas.last != 75);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
	MT_BUG_ON(mt, mas.index != 80);
	MT_BUG_ON(mt, mas.last != 85);

	val = mas_prev(&mas, 70);
	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
	MT_BUG_ON(mt, mas.index != 70);
	MT_BUG_ON(mt, mas.last != 75);

	/* Check across two levels of the tree */
	mas_reset(&mas);
	mas_set(&mas, level2[0]);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != NULL);
	val = mas_next(&mas, level2[1]);
	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
	MT_BUG_ON(mt, mas.index != level2[2]);
	MT_BUG_ON(mt, mas.last != level2[3]);
	mn = mas.node;	/* Remember the node to prove the next step leaves it. */

	val = mas_next(&mas, level2[1]);
	MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
	MT_BUG_ON(mt, mas.index != level2[4]);
	MT_BUG_ON(mt, mas.last != level2[5]);
	MT_BUG_ON(mt, mn == mas.node);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
	MT_BUG_ON(mt, mas.index != level2[2]);
	MT_BUG_ON(mt, mas.last != level2[3]);

	/* Check running off the end and back on */
	mas_set(&mas, nr_entries * 10);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));

	val = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != NULL);
	MT_BUG_ON(mt, mas.index != last_index);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));

	/* Check running off the start and back on */
	mas_reset(&mas);
	mas_set(&mas, 10);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	MT_BUG_ON(mt, mas.index != 10);
	MT_BUG_ON(mt, mas.last != 15);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 5);

	/* A second prev at the start returns NULL and underflows the state. */
	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 5);
	MT_BUG_ON(mt, !mas_is_underflow(&mas));

	/* Erase 0-5 so the range below 10 is NULL, then prev into the gap. */
	mas.index = 0;
	mas.last = 5;
	mas_store(&mas, NULL);
	mas_reset(&mas);
	mas_set(&mas, 10);
	mas_walk(&mas);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 9);
	mas_unlock(&mas);

	mtree_destroy(mt);

	/* Two single-index entries: prev limited to 4 must not find index 0. */
	mt_init(mt);
	mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
	mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
	rcu_read_lock();
	mas_set(&mas, 5);
	val = mas_prev(&mas, 4);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}
2371 
2372 
2373 
2374 /* Test spanning writes that require balancing right sibling or right cousin */
2375 static noinline void __init check_spanning_relatives(struct maple_tree *mt)
2376 {
2377 
2378 	unsigned long i, nr_entries = 1000;
2379 
2380 	for (i = 0; i <= nr_entries; i++)
2381 		mtree_store_range(mt, i*10, i*10 + 5,
2382 				  xa_mk_value(i), GFP_KERNEL);
2383 
2384 
2385 	mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
2386 }
2387 
/*
 * check_fuzzer() - Replay operation sequences discovered by fuzzing that each
 * triggered a (since fixed) bug.  Every numbered section is an exact
 * reproduction; the order and values of the calls are the test, so they must
 * not be altered.  Each section describes the original failure and its fix.
 */
static noinline void __init check_fuzzer(struct maple_tree *mt)
{
	/*
	 * 1. Causes a spanning rebalance of a single root node.
	 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
	 * entire right side is consumed.
	 */
	mtree_test_insert(mt, 88, (void *)0xb1);
	mtree_test_insert(mt, 84, (void *)0xa9);
	mtree_test_insert(mt, 2,  (void *)0x5);
	mtree_test_insert(mt, 4,  (void *)0x9);
	mtree_test_insert(mt, 14, (void *)0x1d);
	mtree_test_insert(mt, 7,  (void *)0xf);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_insert(mt, 18, (void *)0x25);
	mtree_test_store_range(mt, 8, 18, (void *)0x11);
	mtree_destroy(mt);


	/*
	 * 2. Cause a spanning rebalance of two nodes in root.
	 * Fixed by setting mast->r->max correctly.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 87, (void *)0xaf);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 4);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_store(mt, 8, (void *)0x11);
	mtree_test_store(mt, 44, (void *)0x59);
	mtree_test_store(mt, 68, (void *)0x89);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 43, (void *)0x57);
	mtree_test_insert(mt, 24, (void *)0x31);
	mtree_test_insert(mt, 844, (void *)0x699);
	mtree_test_store(mt, 84, (void *)0xa9);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 4);
	mtree_test_load(mt, 5);
	mtree_test_erase(mt, 0);
	mtree_destroy(mt);

	/*
	 * 3. Cause a node overflow on copy
	 * Fixed by using the correct check for node size in mas_wr_modify()
	 * Also discovered issue with metadata setting.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 5);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 4);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 5);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 5);
	mtree_test_erase(mt, 4);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_store(mt, 444, (void *)0x379);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 0);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 0);
	mtree_destroy(mt);

	/*
	 * 4. spanning store failure due to writing incorrect pivot value at
	 * last slot.
	 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes()
	 *
	 */
	mt_init_flags(mt, 0);
	mtree_test_insert(mt, 261, (void *)0x20b);
	mtree_test_store(mt, 516, (void *)0x409);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_insert(mt, 1256, (void *)0x9d1);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 1);
	mtree_test_store(mt, 56, (void *)0x71);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 24, (void *)0x31);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2263, (void *)0x11af);
	mtree_test_insert(mt, 446, (void *)0x37d);
	mtree_test_store_range(mt, 6, 45, (void *)0xd);
	mtree_test_store_range(mt, 3, 446, (void *)0x7);
	mtree_destroy(mt);

	/*
	 * 5. mas_wr_extend_null() may overflow slots.
	 * Fix by checking against wr_mas->node_end.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 48, (void *)0x61);
	mtree_test_store(mt, 3, (void *)0x7);
	mtree_test_load(mt, 0);
	mtree_test_store(mt, 88, (void *)0xb1);
	mtree_test_store(mt, 81, (void *)0xa3);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 2480, (void *)0x1361);
	mtree_test_insert(mt, ULONG_MAX,
			  (void *)0xffffffffffffffff);
	mtree_test_erase(mt, ULONG_MAX);
	mtree_destroy(mt);

	/*
	 * 6.  When reusing a node with an implied pivot and the node is
	 * shrinking, old data would be left in the implied slot
	 * Fixed by checking the last pivot for the mas->max and clear
	 * accordingly.  This only affected the left-most node as that node is
	 * the only one allowed to end in NULL.
	 */
	mt_init_flags(mt, 0);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_load(mt, 2);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_load(mt, 2);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_store_range(mt, 4, 62, (void *)0x9);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 0, (void *)0x3);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 15, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_insert(mt, 122, (void *)0xf5);
	mtree_test_store(mt, 3, (void *)0x7);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_store_range(mt, 0, 1, (void *)0x1);
	mtree_test_insert(mt, 85, (void *)0xab);
	mtree_test_insert(mt, 72, (void *)0x91);
	mtree_test_insert(mt, 81, (void *)0xa3);
	mtree_test_insert(mt, 726, (void *)0x5ad);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 51, (void *)0x67);
	mtree_test_insert(mt, 611, (void *)0x4c7);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 26, 1, (void *)0x35);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 22, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 53);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 1, (void *)0x3);
	mtree_test_insert(mt, 222, (void *)0x1bd);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 0);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 621, (void *)0x4db);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_erase(mt, 5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 0, (void *)0x3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store_range(mt, 4, 62, (void *)0x9);
	mtree_test_erase(mt, 62);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 22, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 53);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 1, (void *)0x3);
	mtree_test_insert(mt, 222, (void *)0x1bd);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 0);
	mtree_test_load(mt, 0);
	mtree_destroy(mt);

	/*
	 * 7. Previous fix was incomplete, fix mas_reuse_node() clearing of old
	 * data by overwriting it first - that way metadata is of no concern.
	 */
	mt_init_flags(mt, 0);
	mtree_test_load(mt, 1);
	mtree_test_insert(mt, 102, (void *)0xcd);
	mtree_test_erase(mt, 2);
	mtree_test_erase(mt, 0);
	mtree_test_load(mt, 0);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 110, (void *)0xdd);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 5, 0, (void *)0xb);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store(mt, 112, (void *)0xe1);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 22);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 210, (void *)0x1a5);
	mtree_test_store_range(mt, 0, 2, (void *)0x1);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_erase(mt, 22);
	mtree_test_erase(mt, 1);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 112);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 112);
	mtree_test_store_range(mt, 110, 12, (void *)0xdd);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 110);
	mtree_test_insert_range(mt, 4, 71, (void *)0x9);
	mtree_test_load(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_insert_range(mt, 11, 22, (void *)0x17);
	mtree_test_erase(mt, 12);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 22);
	mtree_destroy(mt);


	/*
	 * 8.  When rebalancing or spanning_rebalance(), the max of the new node
	 * may be set incorrectly to the final pivot and not the right max.
	 * Fix by setting the left max to orig right max if the entire node is
	 * consumed.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 6716, (void *)0x3479);
	mtree_test_store(mt, 61, (void *)0x7b);
	mtree_test_insert(mt, 13, (void *)0x1b);
	mtree_test_store(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 0);
	mtree_test_erase(mt, 67167);
	mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_erase(mt, 67);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 667167);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_erase(mt, 67);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 67167, (void *)0x20cbf);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 7);
	mtree_test_insert(mt, 16, (void *)0x21);
	mtree_test_insert(mt, 36, (void *)0x49);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 367, (void *)0x2df);
	mtree_test_insert(mt, 115, (void *)0xe7);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store_range(mt, 1, 3, (void *)0x3);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 67167);
	mtree_test_insert_range(mt, 6, 47, (void *)0xd);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 1, 67, (void *)0x3);
	mtree_test_load(mt, 67);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 67167);
	mtree_destroy(mt);

	/*
	 * 9. spanning store to the end of data caused an invalid metadata
	 * length which resulted in a crash eventually.
	 * Fix by checking if there is a value in pivot before incrementing the
	 * metadata end in mab_mas_cp().  To ensure this doesn't happen again,
	 * abstract the two locations this happens into a function called
	 * mas_leaf_set_meta().
	 */
	mt_init_flags(mt, 0);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 91, (void *)0xb7);
	mtree_test_insert(mt, 18, (void *)0x25);
	mtree_test_insert(mt, 81, (void *)0xa3);
	mtree_test_store_range(mt, 0, 128, (void *)0x1);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 8);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
	mtree_test_erase(mt, ULONG_MAX - 10);
	mtree_test_store_range(mt, 0, 281, (void *)0x1);
	mtree_test_erase(mt, 2);
	mtree_test_insert(mt, 1211, (void *)0x977);
	mtree_test_insert(mt, 111, (void *)0xdf);
	mtree_test_insert(mt, 13, (void *)0x1b);
	mtree_test_insert(mt, 211, (void *)0x1a7);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_insert(mt, 1218, (void *)0x985);
	mtree_test_insert(mt, 61, (void *)0x7b);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 121, (void *)0xf3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
	mtree_test_erase(mt, ULONG_MAX - 10);
}
2783 
2784 static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
2785 {
2786 	int i = 50;
2787 	MA_STATE(mas, mt, 0, 0);
2788 
2789 	mt_set_non_kernel(9999);
2790 	mas_lock(&mas);
2791 	do {
2792 		mas_set_range(&mas, i*10, i*10+9);
2793 		mas_store(&mas, check_bnode_min_spanning);
2794 	} while (i--);
2795 
2796 	mas_set_range(&mas, 240, 509);
2797 	mas_store(&mas, NULL);
2798 	mas_unlock(&mas);
2799 	mas_destroy(&mas);
2800 	mt_set_non_kernel(0);
2801 }
2802 
/*
 * check_empty_area_window() - Exercise mas_empty_area() and
 * mas_empty_area_rev() against a tree with known gaps.
 *
 * Layout after setup: entries fill [10, 209] except a hole at [160, 169];
 * [0, 9] is empty (nothing stored below 10), and everything above 209 is
 * free.  Each check hunts for a gap of a given size within a [min, max]
 * window and verifies the found range or the expected error.
 */
static noinline void __init check_empty_area_window(struct maple_tree *mt)
{
	unsigned long i, nr_entries = 20;
	MA_STATE(mas, mt, 0, 0);

	for (i = 1; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 9,
				  xa_mk_value(i), GFP_KERNEL);

	/* Create another hole besides the one at 0 */
	mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);

	/* Check lower bounds that don't fit */
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);

	/* Check lower bound that does fit */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
	MT_BUG_ON(mt, mas.index != 5);
	MT_BUG_ON(mt, mas.last != 9);
	rcu_read_unlock();

	/* Check one gap that doesn't fit and one that does */
	rcu_read_lock();
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
	MT_BUG_ON(mt, mas.index != 161);
	MT_BUG_ON(mt, mas.last != 169);

	/* Check one gap that does fit above the min */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
	MT_BUG_ON(mt, mas.index != 216);
	MT_BUG_ON(mt, mas.last != 218);

	/* Check size that doesn't fit any gap */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);

	/*
	 * Check size that doesn't fit the lower end of the window but
	 * does fit the gap
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);

	/*
	 * Check size that doesn't fit the upper end of the window but
	 * does fit the gap
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);

	/* Check mas_empty_area forward */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 8);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 3);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);

	/* A max below the requested size is rejected outright. */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);

	/* Leave the state mid-search, then search a window that cannot fit. */
	mas_reset(&mas);
	mas_empty_area(&mas, 100, 165, 3);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
	rcu_read_unlock();
}
2887 
/*
 * check_empty_area_fill() - Repeatedly find-and-fill gaps until the tree is
 * packed, then verify searches fail when no space is left and still succeed
 * in a carefully constructed deep-tree corner case.
 */
static noinline void __init check_empty_area_fill(struct maple_tree *mt)
{
	const unsigned long max = 0x25D78000;
	unsigned long size;
	int loop, shift;
	MA_STATE(mas, mt, 0, 0);

	mt_set_non_kernel(99999);
	/* Fill with gaps of 4K, 8K, ... 64K until the range [0, max) packs. */
	for (shift = 12; shift <= 16; shift++) {
		loop = 5000;
		size = 1 << shift;
		while (loop--) {
			mas_set(&mas, 0);
			mas_lock(&mas);
			MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
			MT_BUG_ON(mt, mas.last != mas.index + size - 1);
			/* Store the size as the value; any non-NULL works. */
			mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
			mas_unlock(&mas);
			mas_reset(&mas);
		}
	}

	/* No space left. */
	size = 0x1000;
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
	rcu_read_unlock();

	/* Fill a depth 3 node to the maximum */
	for (unsigned long i = 629440511; i <= 629440800; i += 6)
		mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
	/* Make space in the second-last depth 4 node */
	mtree_erase(mt, 631668735);
	/* Make space in the last depth 4 node */
	mtree_erase(mt, 629506047);
	mas_reset(&mas);
	/* Search from just after the gap in the second-last depth 4 */
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
	rcu_read_unlock();
	mt_set_non_kernel(0);
}
2930 
2931 /*
2932  * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
2933  *
2934  * The table below shows the single entry tree (0-0 pointer) and normal tree
2935  * with nodes.
2936  *
2937  * Function	ENTRY	Start		Result		index & last
2938  *     ┬          ┬       ┬               ┬                ┬
2939  *     │          │       │               │                └─ the final range
2940  *     │          │       │               └─ The node value after execution
2941  *     │          │       └─ The node value before execution
 *     │          └─ If the entry exists or does not exist (DNE)
2943  *     └─ The function name
2944  *
2945  * Function	ENTRY	Start		Result		index & last
2946  * mas_next()
2947  *  - after last
2948  *			Single entry tree at 0-0
2949  *			------------------------
2950  *		DNE	MAS_START	MAS_NONE	1 - oo
2951  *		DNE	MAS_PAUSE	MAS_NONE	1 - oo
2952  *		DNE	MAS_ROOT	MAS_NONE	1 - oo
2953  *			when index = 0
2954  *		DNE	MAS_NONE	MAS_ROOT	0
2955  *			when index > 0
2956  *		DNE	MAS_NONE	MAS_NONE	1 - oo
2957  *
2958  *			Normal tree
2959  *			-----------
2960  *		exists	MAS_START	active		range
2961  *		DNE	MAS_START	active		set to last range
2962  *		exists	MAS_PAUSE	active		range
2963  *		DNE	MAS_PAUSE	active		set to last range
2964  *		exists	MAS_NONE	active		range
2965  *		exists	active		active		range
2966  *		DNE	active		active		set to last range
2967  *		ERANGE	active		MAS_OVERFLOW	last range
2968  *
2969  * Function	ENTRY	Start		Result		index & last
2970  * mas_prev()
2971  * - before index
2972  *			Single entry tree at 0-0
2973  *			------------------------
2974  *				if index > 0
2975  *		exists	MAS_START	MAS_ROOT	0
2976  *		exists	MAS_PAUSE	MAS_ROOT	0
2977  *		exists	MAS_NONE	MAS_ROOT	0
2978  *
2979  *				if index == 0
2980  *		DNE	MAS_START	MAS_NONE	0
2981  *		DNE	MAS_PAUSE	MAS_NONE	0
2982  *		DNE	MAS_NONE	MAS_NONE	0
2983  *		DNE	MAS_ROOT	MAS_NONE	0
2984  *
2985  *			Normal tree
2986  *			-----------
2987  *		exists	MAS_START	active		range
2988  *		DNE	MAS_START	active		set to min
2989  *		exists	MAS_PAUSE	active		range
2990  *		DNE	MAS_PAUSE	active		set to min
2991  *		exists	MAS_NONE	active		range
2992  *		DNE	MAS_NONE	MAS_NONE	set to min
2993  *		any	MAS_ROOT	MAS_NONE	0
2994  *		exists	active		active		range
2995  *		DNE	active		active		last range
2996  *		ERANGE	active		MAS_UNDERFLOW	last range
2997  *
2998  * Function	ENTRY	Start		Result		index & last
2999  * mas_find()
3000  *  - at index or next
3001  *			Single entry tree at 0-0
3002  *			------------------------
3003  *				if index >  0
3004  *		DNE	MAS_START	MAS_NONE	0
3005  *		DNE	MAS_PAUSE	MAS_NONE	0
3006  *		DNE	MAS_ROOT	MAS_NONE	0
3007  *		DNE	MAS_NONE	MAS_NONE	1
3008  *				if index ==  0
3009  *		exists	MAS_START	MAS_ROOT	0
3010  *		exists	MAS_PAUSE	MAS_ROOT	0
3011  *		exists	MAS_NONE	MAS_ROOT	0
3012  *
3013  *			Normal tree
3014  *			-----------
3015  *		exists	MAS_START	active		range
3016  *		DNE	MAS_START	active		set to max
3017  *		exists	MAS_PAUSE	active		range
3018  *		DNE	MAS_PAUSE	active		set to max
3019  *		exists	MAS_NONE	active		range (start at last)
3020  *		exists	active		active		range
3021  *		DNE	active		active		last range (max < last)
3022  *
3023  * Function	ENTRY	Start		Result		index & last
3024  * mas_find_rev()
3025  *  - at index or before
3026  *			Single entry tree at 0-0
3027  *			------------------------
3028  *				if index >  0
3029  *		exists	MAS_START	MAS_ROOT	0
3030  *		exists	MAS_PAUSE	MAS_ROOT	0
3031  *		exists	MAS_NONE	MAS_ROOT	0
3032  *				if index ==  0
3033  *		DNE	MAS_START	MAS_NONE	0
3034  *		DNE	MAS_PAUSE	MAS_NONE	0
3035  *		DNE	MAS_NONE	MAS_NONE	0
3036  *		DNE	MAS_ROOT	MAS_NONE	0
3037  *
3038  *			Normal tree
3039  *			-----------
3040  *		exists	MAS_START	active		range
3041  *		DNE	MAS_START	active		set to min
3042  *		exists	MAS_PAUSE	active		range
3043  *		DNE	MAS_PAUSE	active		set to min
3044  *		exists	MAS_NONE	active		range (start at index)
3045  *		exists	active		active		range
3046  *		DNE	active		active		last range (min > index)
3047  *
3048  * Function	ENTRY	Start		Result		index & last
3049  * mas_walk()
3050  * - Look up index
3051  *			Single entry tree at 0-0
3052  *			------------------------
3053  *				if index >  0
3054  *		DNE	MAS_START	MAS_ROOT	1 - oo
3055  *		DNE	MAS_PAUSE	MAS_ROOT	1 - oo
3056  *		DNE	MAS_NONE	MAS_ROOT	1 - oo
3057  *		DNE	MAS_ROOT	MAS_ROOT	1 - oo
3058  *				if index ==  0
3059  *		exists	MAS_START	MAS_ROOT	0
3060  *		exists	MAS_PAUSE	MAS_ROOT	0
3061  *		exists	MAS_NONE	MAS_ROOT	0
3062  *		exists	MAS_ROOT	MAS_ROOT	0
3063  *
3064  *			Normal tree
3065  *			-----------
3066  *		exists	MAS_START	active		range
3067  *		DNE	MAS_START	active		range of NULL
3068  *		exists	MAS_PAUSE	active		range
3069  *		DNE	MAS_PAUSE	active		range of NULL
3070  *		exists	MAS_NONE	active		range
3071  *		DNE	MAS_NONE	active		range of NULL
3072  *		exists	active		active		range
3073  *		DNE	active		active		range of NULL
3074  */
3075 
3076 static noinline void __init check_state_handling(struct maple_tree *mt)
3077 {
3078 	MA_STATE(mas, mt, 0, 0);
3079 	void *entry, *ptr = (void *) 0x1234500;
3080 	void *ptr2 = &ptr;
3081 	void *ptr3 = &ptr2;
3082 	unsigned long index;
3083 
3084 	/* Check MAS_ROOT First */
3085 	mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
3086 
3087 	mas_lock(&mas);
3088 	/* prev: Start -> underflow*/
3089 	entry = mas_prev(&mas, 0);
3090 	MT_BUG_ON(mt, entry != NULL);
3091 	MT_BUG_ON(mt, mas.status != ma_underflow);
3092 
3093 	/* prev: Start -> root */
3094 	mas_set(&mas, 10);
3095 	entry = mas_prev(&mas, 0);
3096 	MT_BUG_ON(mt, entry != ptr);
3097 	MT_BUG_ON(mt, mas.index != 0);
3098 	MT_BUG_ON(mt, mas.last != 0);
3099 	MT_BUG_ON(mt, mas.status != ma_root);
3100 
3101 	/* prev: pause -> root */
3102 	mas_set(&mas, 10);
3103 	mas_pause(&mas);
3104 	entry = mas_prev(&mas, 0);
3105 	MT_BUG_ON(mt, entry != ptr);
3106 	MT_BUG_ON(mt, mas.index != 0);
3107 	MT_BUG_ON(mt, mas.last != 0);
3108 	MT_BUG_ON(mt, mas.status != ma_root);
3109 
3110 	/* next: start -> none */
3111 	mas_set(&mas, 0);
3112 	entry = mas_next(&mas, ULONG_MAX);
3113 	MT_BUG_ON(mt, mas.index != 1);
3114 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3115 	MT_BUG_ON(mt, entry != NULL);
3116 	MT_BUG_ON(mt, mas.status != ma_none);
3117 
3118 	/* next: start -> none*/
3119 	mas_set(&mas, 10);
3120 	entry = mas_next(&mas, ULONG_MAX);
3121 	MT_BUG_ON(mt, mas.index != 1);
3122 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3123 	MT_BUG_ON(mt, entry != NULL);
3124 	MT_BUG_ON(mt, mas.status != ma_none);
3125 
3126 	/* find: start -> root */
3127 	mas_set(&mas, 0);
3128 	entry = mas_find(&mas, ULONG_MAX);
3129 	MT_BUG_ON(mt, entry != ptr);
3130 	MT_BUG_ON(mt, mas.index != 0);
3131 	MT_BUG_ON(mt, mas.last != 0);
3132 	MT_BUG_ON(mt, mas.status != ma_root);
3133 
3134 	/* find: root -> none */
3135 	entry = mas_find(&mas, ULONG_MAX);
3136 	MT_BUG_ON(mt, entry != NULL);
3137 	MT_BUG_ON(mt, mas.index != 1);
3138 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3139 	MT_BUG_ON(mt, mas.status != ma_none);
3140 
3141 	/* find: none -> none */
3142 	entry = mas_find(&mas, ULONG_MAX);
3143 	MT_BUG_ON(mt, entry != NULL);
3144 	MT_BUG_ON(mt, mas.index != 1);
3145 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3146 	MT_BUG_ON(mt, mas.status != ma_none);
3147 
3148 	/* find: start -> none */
3149 	mas_set(&mas, 10);
3150 	entry = mas_find(&mas, ULONG_MAX);
3151 	MT_BUG_ON(mt, entry != NULL);
3152 	MT_BUG_ON(mt, mas.index != 1);
3153 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3154 	MT_BUG_ON(mt, mas.status != ma_none);
3155 
3156 	/* find_rev: none -> root */
3157 	entry = mas_find_rev(&mas, 0);
3158 	MT_BUG_ON(mt, entry != ptr);
3159 	MT_BUG_ON(mt, mas.index != 0);
3160 	MT_BUG_ON(mt, mas.last != 0);
3161 	MT_BUG_ON(mt, mas.status != ma_root);
3162 
3163 	/* find_rev: start -> root */
3164 	mas_set(&mas, 0);
3165 	entry = mas_find_rev(&mas, 0);
3166 	MT_BUG_ON(mt, entry != ptr);
3167 	MT_BUG_ON(mt, mas.index != 0);
3168 	MT_BUG_ON(mt, mas.last != 0);
3169 	MT_BUG_ON(mt, mas.status != ma_root);
3170 
3171 	/* find_rev: root -> none */
3172 	entry = mas_find_rev(&mas, 0);
3173 	MT_BUG_ON(mt, entry != NULL);
3174 	MT_BUG_ON(mt, mas.index != 0);
3175 	MT_BUG_ON(mt, mas.last != 0);
3176 	MT_BUG_ON(mt, mas.status != ma_none);
3177 
3178 	/* find_rev: none -> none */
3179 	entry = mas_find_rev(&mas, 0);
3180 	MT_BUG_ON(mt, entry != NULL);
3181 	MT_BUG_ON(mt, mas.index != 0);
3182 	MT_BUG_ON(mt, mas.last != 0);
3183 	MT_BUG_ON(mt, mas.status != ma_none);
3184 
3185 	/* find_rev: start -> root */
3186 	mas_set(&mas, 10);
3187 	entry = mas_find_rev(&mas, 0);
3188 	MT_BUG_ON(mt, entry != ptr);
3189 	MT_BUG_ON(mt, mas.index != 0);
3190 	MT_BUG_ON(mt, mas.last != 0);
3191 	MT_BUG_ON(mt, mas.status != ma_root);
3192 
3193 	/* walk: start -> none */
3194 	mas_set(&mas, 10);
3195 	entry = mas_walk(&mas);
3196 	MT_BUG_ON(mt, entry != NULL);
3197 	MT_BUG_ON(mt, mas.index != 1);
3198 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3199 	MT_BUG_ON(mt, mas.status != ma_none);
3200 
3201 	/* walk: pause -> none*/
3202 	mas_set(&mas, 10);
3203 	mas_pause(&mas);
3204 	entry = mas_walk(&mas);
3205 	MT_BUG_ON(mt, entry != NULL);
3206 	MT_BUG_ON(mt, mas.index != 1);
3207 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3208 	MT_BUG_ON(mt, mas.status != ma_none);
3209 
3210 	/* walk: none -> none */
3211 	mas.index = mas.last = 10;
3212 	entry = mas_walk(&mas);
3213 	MT_BUG_ON(mt, entry != NULL);
3214 	MT_BUG_ON(mt, mas.index != 1);
3215 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3216 	MT_BUG_ON(mt, mas.status != ma_none);
3217 
3218 	/* walk: none -> none */
3219 	entry = mas_walk(&mas);
3220 	MT_BUG_ON(mt, entry != NULL);
3221 	MT_BUG_ON(mt, mas.index != 1);
3222 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3223 	MT_BUG_ON(mt, mas.status != ma_none);
3224 
3225 	/* walk: start -> root */
3226 	mas_set(&mas, 0);
3227 	entry = mas_walk(&mas);
3228 	MT_BUG_ON(mt, entry != ptr);
3229 	MT_BUG_ON(mt, mas.index != 0);
3230 	MT_BUG_ON(mt, mas.last != 0);
3231 	MT_BUG_ON(mt, mas.status != ma_root);
3232 
3233 	/* walk: pause -> root */
3234 	mas_set(&mas, 0);
3235 	mas_pause(&mas);
3236 	entry = mas_walk(&mas);
3237 	MT_BUG_ON(mt, entry != ptr);
3238 	MT_BUG_ON(mt, mas.index != 0);
3239 	MT_BUG_ON(mt, mas.last != 0);
3240 	MT_BUG_ON(mt, mas.status != ma_root);
3241 
3242 	/* walk: none -> root */
3243 	mas.status = ma_none;
3244 	entry = mas_walk(&mas);
3245 	MT_BUG_ON(mt, entry != ptr);
3246 	MT_BUG_ON(mt, mas.index != 0);
3247 	MT_BUG_ON(mt, mas.last != 0);
3248 	MT_BUG_ON(mt, mas.status != ma_root);
3249 
3250 	/* walk: root -> root */
3251 	entry = mas_walk(&mas);
3252 	MT_BUG_ON(mt, entry != ptr);
3253 	MT_BUG_ON(mt, mas.index != 0);
3254 	MT_BUG_ON(mt, mas.last != 0);
3255 	MT_BUG_ON(mt, mas.status != ma_root);
3256 
3257 	/* walk: root -> none */
3258 	mas_set(&mas, 10);
3259 	entry = mas_walk(&mas);
3260 	MT_BUG_ON(mt, entry != NULL);
3261 	MT_BUG_ON(mt, mas.index != 1);
3262 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3263 	MT_BUG_ON(mt, mas.status != ma_none);
3264 
3265 	/* walk: none -> root */
3266 	mas.index = mas.last = 0;
3267 	entry = mas_walk(&mas);
3268 	MT_BUG_ON(mt, entry != ptr);
3269 	MT_BUG_ON(mt, mas.index != 0);
3270 	MT_BUG_ON(mt, mas.last != 0);
3271 	MT_BUG_ON(mt, mas.status != ma_root);
3272 
3273 	mas_unlock(&mas);
3274 
3275 	/* Check when there is an actual node */
3276 	mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3277 	mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3278 	mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3279 	mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3280 
3281 	mas_lock(&mas);
3282 
3283 	/* next: start ->active */
3284 	mas_set(&mas, 0);
3285 	entry = mas_next(&mas, ULONG_MAX);
3286 	MT_BUG_ON(mt, entry != ptr);
3287 	MT_BUG_ON(mt, mas.index != 0x1000);
3288 	MT_BUG_ON(mt, mas.last != 0x1500);
3289 	MT_BUG_ON(mt, !mas_is_active(&mas));
3290 
3291 	/* next: pause ->active */
3292 	mas_set(&mas, 0);
3293 	mas_pause(&mas);
3294 	entry = mas_next(&mas, ULONG_MAX);
3295 	MT_BUG_ON(mt, entry != ptr);
3296 	MT_BUG_ON(mt, mas.index != 0x1000);
3297 	MT_BUG_ON(mt, mas.last != 0x1500);
3298 	MT_BUG_ON(mt, !mas_is_active(&mas));
3299 
3300 	/* next: none ->active */
3301 	mas.index = mas.last = 0;
3302 	mas.offset = 0;
3303 	mas.status = ma_none;
3304 	entry = mas_next(&mas, ULONG_MAX);
3305 	MT_BUG_ON(mt, entry != ptr);
3306 	MT_BUG_ON(mt, mas.index != 0x1000);
3307 	MT_BUG_ON(mt, mas.last != 0x1500);
3308 	MT_BUG_ON(mt, !mas_is_active(&mas));
3309 
3310 	/* next:active ->active (spanning limit) */
3311 	entry = mas_next(&mas, 0x2100);
3312 	MT_BUG_ON(mt, entry != ptr2);
3313 	MT_BUG_ON(mt, mas.index != 0x2000);
3314 	MT_BUG_ON(mt, mas.last != 0x2500);
3315 	MT_BUG_ON(mt, !mas_is_active(&mas));
3316 
3317 	/* next:active -> overflow (limit reached) beyond data */
3318 	entry = mas_next(&mas, 0x2999);
3319 	MT_BUG_ON(mt, entry != NULL);
3320 	MT_BUG_ON(mt, mas.index != 0x2501);
3321 	MT_BUG_ON(mt, mas.last != 0x2fff);
3322 	MT_BUG_ON(mt, !mas_is_overflow(&mas));
3323 
3324 	/* next:overflow -> active (limit changed) */
3325 	entry = mas_next(&mas, ULONG_MAX);
3326 	MT_BUG_ON(mt, entry != ptr3);
3327 	MT_BUG_ON(mt, mas.index != 0x3000);
3328 	MT_BUG_ON(mt, mas.last != 0x3500);
3329 	MT_BUG_ON(mt, !mas_is_active(&mas));
3330 
3331 	/* next:active ->  overflow (limit reached) */
3332 	entry = mas_next(&mas, ULONG_MAX);
3333 	MT_BUG_ON(mt, entry != NULL);
3334 	MT_BUG_ON(mt, mas.index != 0x3501);
3335 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3336 	MT_BUG_ON(mt, !mas_is_overflow(&mas));
3337 
3338 	/* next:overflow -> overflow  */
3339 	entry = mas_next(&mas, ULONG_MAX);
3340 	MT_BUG_ON(mt, entry != NULL);
3341 	MT_BUG_ON(mt, mas.index != 0x3501);
3342 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3343 	MT_BUG_ON(mt, !mas_is_overflow(&mas));
3344 
3345 	/* prev:overflow -> active  */
3346 	entry = mas_prev(&mas, 0);
3347 	MT_BUG_ON(mt, entry != ptr3);
3348 	MT_BUG_ON(mt, mas.index != 0x3000);
3349 	MT_BUG_ON(mt, mas.last != 0x3500);
3350 	MT_BUG_ON(mt, !mas_is_active(&mas));
3351 
3352 	/* next: none -> active, skip value at location */
3353 	mas_set(&mas, 0);
3354 	entry = mas_next(&mas, ULONG_MAX);
3355 	mas.status = ma_none;
3356 	mas.offset = 0;
3357 	entry = mas_next(&mas, ULONG_MAX);
3358 	MT_BUG_ON(mt, entry != ptr2);
3359 	MT_BUG_ON(mt, mas.index != 0x2000);
3360 	MT_BUG_ON(mt, mas.last != 0x2500);
3361 	MT_BUG_ON(mt, !mas_is_active(&mas));
3362 
3363 	/* prev:active ->active */
3364 	entry = mas_prev(&mas, 0);
3365 	MT_BUG_ON(mt, entry != ptr);
3366 	MT_BUG_ON(mt, mas.index != 0x1000);
3367 	MT_BUG_ON(mt, mas.last != 0x1500);
3368 	MT_BUG_ON(mt, !mas_is_active(&mas));
3369 
3370 	/* prev:active -> underflow (span limit) */
3371 	mas_next(&mas, ULONG_MAX);
3372 	entry = mas_prev(&mas, 0x1200);
3373 	MT_BUG_ON(mt, entry != ptr);
3374 	MT_BUG_ON(mt, mas.index != 0x1000);
3375 	MT_BUG_ON(mt, mas.last != 0x1500);
3376 	MT_BUG_ON(mt, !mas_is_active(&mas)); /* spanning limit */
3377 	entry = mas_prev(&mas, 0x1200); /* underflow */
3378 	MT_BUG_ON(mt, entry != NULL);
3379 	MT_BUG_ON(mt, mas.index != 0x1000);
3380 	MT_BUG_ON(mt, mas.last != 0x1500);
3381 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3382 
3383 	/* prev:underflow -> underflow (lower limit) spanning end range */
3384 	entry = mas_prev(&mas, 0x0100);
3385 	MT_BUG_ON(mt, entry != NULL);
3386 	MT_BUG_ON(mt, mas.index != 0);
3387 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3388 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3389 
3390 	/* prev:underflow -> underflow */
3391 	entry = mas_prev(&mas, 0);
3392 	MT_BUG_ON(mt, entry != NULL);
3393 	MT_BUG_ON(mt, mas.index != 0);
3394 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3395 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3396 
3397 	/* prev:underflow -> underflow */
3398 	entry = mas_prev(&mas, 0);
3399 	MT_BUG_ON(mt, entry != NULL);
3400 	MT_BUG_ON(mt, mas.index != 0);
3401 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3402 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3403 
3404 	/* next:underflow -> active */
3405 	entry = mas_next(&mas, ULONG_MAX);
3406 	MT_BUG_ON(mt, entry != ptr);
3407 	MT_BUG_ON(mt, mas.index != 0x1000);
3408 	MT_BUG_ON(mt, mas.last != 0x1500);
3409 	MT_BUG_ON(mt, !mas_is_active(&mas));
3410 
3411 	/* prev:first value -> underflow */
3412 	entry = mas_prev(&mas, 0x1000);
3413 	MT_BUG_ON(mt, entry != NULL);
3414 	MT_BUG_ON(mt, mas.index != 0x1000);
3415 	MT_BUG_ON(mt, mas.last != 0x1500);
3416 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3417 
3418 	/* find:underflow -> first value */
3419 	entry = mas_find(&mas, ULONG_MAX);
3420 	MT_BUG_ON(mt, entry != ptr);
3421 	MT_BUG_ON(mt, mas.index != 0x1000);
3422 	MT_BUG_ON(mt, mas.last != 0x1500);
3423 	MT_BUG_ON(mt, !mas_is_active(&mas));
3424 
3425 	/* prev: pause ->active */
3426 	mas_set(&mas, 0x3600);
3427 	entry = mas_prev(&mas, 0);
3428 	MT_BUG_ON(mt, entry != ptr3);
3429 	mas_pause(&mas);
3430 	entry = mas_prev(&mas, 0);
3431 	MT_BUG_ON(mt, entry != ptr2);
3432 	MT_BUG_ON(mt, mas.index != 0x2000);
3433 	MT_BUG_ON(mt, mas.last != 0x2500);
3434 	MT_BUG_ON(mt, !mas_is_active(&mas));
3435 
3436 	/* prev:active -> underflow spanning min */
3437 	entry = mas_prev(&mas, 0x1600);
3438 	MT_BUG_ON(mt, entry != NULL);
3439 	MT_BUG_ON(mt, mas.index != 0x1501);
3440 	MT_BUG_ON(mt, mas.last != 0x1FFF);
3441 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3442 
3443 	/* prev: active ->active, continue */
3444 	entry = mas_prev(&mas, 0);
3445 	MT_BUG_ON(mt, entry != ptr);
3446 	MT_BUG_ON(mt, mas.index != 0x1000);
3447 	MT_BUG_ON(mt, mas.last != 0x1500);
3448 	MT_BUG_ON(mt, !mas_is_active(&mas));
3449 
3450 	/* find: start ->active */
3451 	mas_set(&mas, 0);
3452 	entry = mas_find(&mas, ULONG_MAX);
3453 	MT_BUG_ON(mt, entry != ptr);
3454 	MT_BUG_ON(mt, mas.index != 0x1000);
3455 	MT_BUG_ON(mt, mas.last != 0x1500);
3456 	MT_BUG_ON(mt, !mas_is_active(&mas));
3457 
3458 	/* find: pause ->active */
3459 	mas_set(&mas, 0);
3460 	mas_pause(&mas);
3461 	entry = mas_find(&mas, ULONG_MAX);
3462 	MT_BUG_ON(mt, entry != ptr);
3463 	MT_BUG_ON(mt, mas.index != 0x1000);
3464 	MT_BUG_ON(mt, mas.last != 0x1500);
3465 	MT_BUG_ON(mt, !mas_is_active(&mas));
3466 
3467 	/* find: start ->active on value */
3468 	mas_set(&mas, 1200);
3469 	entry = mas_find(&mas, ULONG_MAX);
3470 	MT_BUG_ON(mt, entry != ptr);
3471 	MT_BUG_ON(mt, mas.index != 0x1000);
3472 	MT_BUG_ON(mt, mas.last != 0x1500);
3473 	MT_BUG_ON(mt, !mas_is_active(&mas));
3474 
3475 	/* find:active ->active */
3476 	entry = mas_find(&mas, ULONG_MAX);
3477 	MT_BUG_ON(mt, entry != ptr2);
3478 	MT_BUG_ON(mt, mas.index != 0x2000);
3479 	MT_BUG_ON(mt, mas.last != 0x2500);
3480 	MT_BUG_ON(mt, !mas_is_active(&mas));
3481 
3482 
3483 	/* find:active -> active (NULL)*/
3484 	entry = mas_find(&mas, 0x2700);
3485 	MT_BUG_ON(mt, entry != NULL);
3486 	MT_BUG_ON(mt, mas.index != 0x2501);
3487 	MT_BUG_ON(mt, mas.last != 0x2FFF);
3488 	MAS_BUG_ON(&mas, !mas_is_active(&mas));
3489 
3490 	/* find: overflow ->active */
3491 	entry = mas_find(&mas, 0x5000);
3492 	MT_BUG_ON(mt, entry != ptr3);
3493 	MT_BUG_ON(mt, mas.index != 0x3000);
3494 	MT_BUG_ON(mt, mas.last != 0x3500);
3495 	MT_BUG_ON(mt, !mas_is_active(&mas));
3496 
3497 	/* find:active -> active (NULL) end*/
3498 	entry = mas_find(&mas, ULONG_MAX);
3499 	MT_BUG_ON(mt, entry != NULL);
3500 	MT_BUG_ON(mt, mas.index != 0x3501);
3501 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3502 	MAS_BUG_ON(&mas, !mas_is_active(&mas));
3503 
3504 	/* find_rev: active (END) ->active */
3505 	entry = mas_find_rev(&mas, 0);
3506 	MT_BUG_ON(mt, entry != ptr3);
3507 	MT_BUG_ON(mt, mas.index != 0x3000);
3508 	MT_BUG_ON(mt, mas.last != 0x3500);
3509 	MT_BUG_ON(mt, !mas_is_active(&mas));
3510 
3511 	/* find_rev:active ->active */
3512 	entry = mas_find_rev(&mas, 0);
3513 	MT_BUG_ON(mt, entry != ptr2);
3514 	MT_BUG_ON(mt, mas.index != 0x2000);
3515 	MT_BUG_ON(mt, mas.last != 0x2500);
3516 	MT_BUG_ON(mt, !mas_is_active(&mas));
3517 
3518 	/* find_rev: pause ->active */
3519 	mas_pause(&mas);
3520 	entry = mas_find_rev(&mas, 0);
3521 	MT_BUG_ON(mt, entry != ptr);
3522 	MT_BUG_ON(mt, mas.index != 0x1000);
3523 	MT_BUG_ON(mt, mas.last != 0x1500);
3524 	MT_BUG_ON(mt, !mas_is_active(&mas));
3525 
3526 	/* find_rev:active -> underflow */
3527 	entry = mas_find_rev(&mas, 0);
3528 	MT_BUG_ON(mt, entry != NULL);
3529 	MT_BUG_ON(mt, mas.index != 0);
3530 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3531 	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3532 
3533 	/* find_rev: start ->active */
3534 	mas_set(&mas, 0x1200);
3535 	entry = mas_find_rev(&mas, 0);
3536 	MT_BUG_ON(mt, entry != ptr);
3537 	MT_BUG_ON(mt, mas.index != 0x1000);
3538 	MT_BUG_ON(mt, mas.last != 0x1500);
3539 	MT_BUG_ON(mt, !mas_is_active(&mas));
3540 
3541 	/* mas_walk start ->active */
3542 	mas_set(&mas, 0x1200);
3543 	entry = mas_walk(&mas);
3544 	MT_BUG_ON(mt, entry != ptr);
3545 	MT_BUG_ON(mt, mas.index != 0x1000);
3546 	MT_BUG_ON(mt, mas.last != 0x1500);
3547 	MT_BUG_ON(mt, !mas_is_active(&mas));
3548 
3549 	/* mas_walk start ->active */
3550 	mas_set(&mas, 0x1600);
3551 	entry = mas_walk(&mas);
3552 	MT_BUG_ON(mt, entry != NULL);
3553 	MT_BUG_ON(mt, mas.index != 0x1501);
3554 	MT_BUG_ON(mt, mas.last != 0x1fff);
3555 	MT_BUG_ON(mt, !mas_is_active(&mas));
3556 
3557 	/* mas_walk pause ->active */
3558 	mas_set(&mas, 0x1200);
3559 	mas_pause(&mas);
3560 	entry = mas_walk(&mas);
3561 	MT_BUG_ON(mt, entry != ptr);
3562 	MT_BUG_ON(mt, mas.index != 0x1000);
3563 	MT_BUG_ON(mt, mas.last != 0x1500);
3564 	MT_BUG_ON(mt, !mas_is_active(&mas));
3565 
3566 	/* mas_walk pause -> active */
3567 	mas_set(&mas, 0x1600);
3568 	mas_pause(&mas);
3569 	entry = mas_walk(&mas);
3570 	MT_BUG_ON(mt, entry != NULL);
3571 	MT_BUG_ON(mt, mas.index != 0x1501);
3572 	MT_BUG_ON(mt, mas.last != 0x1fff);
3573 	MT_BUG_ON(mt, !mas_is_active(&mas));
3574 
3575 	/* mas_walk none -> active */
3576 	mas_set(&mas, 0x1200);
3577 	mas.status = ma_none;
3578 	entry = mas_walk(&mas);
3579 	MT_BUG_ON(mt, entry != ptr);
3580 	MT_BUG_ON(mt, mas.index != 0x1000);
3581 	MT_BUG_ON(mt, mas.last != 0x1500);
3582 	MT_BUG_ON(mt, !mas_is_active(&mas));
3583 
3584 	/* mas_walk none -> active */
3585 	mas_set(&mas, 0x1600);
3586 	mas.status = ma_none;
3587 	entry = mas_walk(&mas);
3588 	MT_BUG_ON(mt, entry != NULL);
3589 	MT_BUG_ON(mt, mas.index != 0x1501);
3590 	MT_BUG_ON(mt, mas.last != 0x1fff);
3591 	MT_BUG_ON(mt, !mas_is_active(&mas));
3592 
3593 	/* mas_walk active -> active */
3594 	mas.index = 0x1200;
3595 	mas.last = 0x1200;
3596 	mas.offset = 0;
3597 	entry = mas_walk(&mas);
3598 	MT_BUG_ON(mt, entry != ptr);
3599 	MT_BUG_ON(mt, mas.index != 0x1000);
3600 	MT_BUG_ON(mt, mas.last != 0x1500);
3601 	MT_BUG_ON(mt, !mas_is_active(&mas));
3602 
3603 	/* mas_walk active -> active */
3604 	mas.index = 0x1600;
3605 	mas.last = 0x1600;
3606 	entry = mas_walk(&mas);
3607 	MT_BUG_ON(mt, entry != NULL);
3608 	MT_BUG_ON(mt, mas.index != 0x1501);
3609 	MT_BUG_ON(mt, mas.last != 0x1fff);
3610 	MT_BUG_ON(mt, !mas_is_active(&mas));
3611 
3612 	mas_unlock(&mas);
3613 	mtree_destroy(mt);
3614 
3615 	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
3616 	mas_lock(&mas);
3617 	for (int count = 0; count < 30; count++) {
3618 		mas_set(&mas, count);
3619 		mas_store_gfp(&mas, xa_mk_value(count), GFP_KERNEL);
3620 	}
3621 
3622 	/* Ensure mas_find works with MA_UNDERFLOW */
3623 	mas_set(&mas, 0);
3624 	entry = mas_walk(&mas);
3625 	mas_set(&mas, 0);
3626 	mas_prev(&mas, 0);
3627 	MT_BUG_ON(mt, mas.status != ma_underflow);
3628 	MT_BUG_ON(mt, mas_find(&mas, ULONG_MAX) != entry);
3629 
3630 	/* Restore active on mas_next */
3631 	entry = mas_next(&mas, ULONG_MAX);
3632 	index = mas.index;
3633 	mas_prev(&mas, index);
3634 	MT_BUG_ON(mt, mas.status != ma_underflow);
3635 	MT_BUG_ON(mt, mas_next(&mas, ULONG_MAX) != entry);
3636 
3637 	/* Ensure overflow -> active works */
3638 	mas_prev(&mas, 0);
3639 	mas_next(&mas, index - 1);
3640 	MT_BUG_ON(mt, mas.status != ma_overflow);
3641 	MT_BUG_ON(mt, mas_next(&mas, ULONG_MAX) != entry);
3642 
3643 	mas_unlock(&mas);
3644 }
3645 
/*
 * alloc_cyclic_testing() - Exercise mas_alloc_cyclic()/mtree_alloc_cyclic().
 *
 * Starting with *next == 0 and range [2, max], each allocation is expected to
 * return location i + 2 on iteration i and advance *next to i + 3.  Also
 * covers the wrap-around regression below and the *next overflow case.
 */
static noinline void __init alloc_cyclic_testing(struct maple_tree *mt)
{
	unsigned long location;
	unsigned long next;
	int ret = 0;
	MA_STATE(mas, mt, 0, 0);

	/* mas_* variant: allocations start at 2, *next tracks one past. */
	next = 0;
	mtree_lock(mt);
	for (int i = 0; i < 100; i++) {
		mas_alloc_cyclic(&mas, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
		MAS_BUG_ON(&mas, i != location - 2);
		MAS_BUG_ON(&mas, mas.index != location);
		MAS_BUG_ON(&mas, mas.last != location);
		MAS_BUG_ON(&mas, i != next - 3);
	}

	mtree_unlock(mt);
	mtree_destroy(mt);
	/* Same expectations through the mtree_* wrapper. */
	next = 0;
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (int i = 0; i < 100; i++) {
		mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
		MT_BUG_ON(mt, i != location - 2);
		MT_BUG_ON(mt, i != next - 3);
		MT_BUG_ON(mt, mtree_load(mt, location) != mt);
	}

	mtree_destroy(mt);

	/*
	 * Issue with reverse search was discovered
	 * https://lore.kernel.org/all/20241216060600.287B4C4CED0@smtp.kernel.org/
	 * Exhausting the allocation area and forcing the search to wrap needs a
	 * mas_reset() in mas_alloc_cyclic().
	 */
	next = 0;
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	/* Fill the entire [2, 1024] range (1023 slots). */
	for (int i = 0; i < 1023; i++) {
		mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
		MT_BUG_ON(mt, i != location - 2);
		MT_BUG_ON(mt, i != next - 3);
		MT_BUG_ON(mt, mtree_load(mt, location) != mt);
	}
	/* Free a slot behind *next; a wrapped search must find it. */
	mtree_erase(mt, 123);
	MT_BUG_ON(mt, mtree_load(mt, 123) != NULL);
	mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
	MT_BUG_ON(mt, 123 != location);
	MT_BUG_ON(mt, 124 != next);
	MT_BUG_ON(mt, mtree_load(mt, location) != mt);
	/* And again with a hole before the previous one. */
	mtree_erase(mt, 100);
	mtree_alloc_cyclic(mt, &location, mt, 2, 1024, &next, GFP_KERNEL);
	MT_BUG_ON(mt, 100 != location);
	MT_BUG_ON(mt, 101 != next);
	MT_BUG_ON(mt, mtree_load(mt, location) != mt);
	mtree_destroy(mt);

	/* Overflow test: the third call runs *next past ULONG_MAX, which is
	 * expected to report the wrap via a return value of 1.
	 */
	next = ULONG_MAX - 1;
	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
	MT_BUG_ON(mt, ret != 0);
	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
	MT_BUG_ON(mt, ret != 0);
	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
	MT_BUG_ON(mt, ret != 1);
}
3712 
/* Tree shared by all of the init-time tests below; each test destroys it. */
static DEFINE_MTREE(tree);

/*
 * maple_tree_seed() - module init: run the maple tree unit tests, or a single
 * benchmark when one of the BENCH_* defines at the top of the file is
 * enabled.
 *
 * Return: 0 when every test passed, -EINVAL otherwise.
 */
static int __init maple_tree_seed(void)
{
	unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
				1001, 1002, 1003, 1005, 0,
				5003, 5002};
	void *ptr = &set;

	pr_info("\nTEST STARTING\n\n");

	/* Benchmark mode: run one benchmark and skip the unit tests. */
#if defined(BENCH_SLOT_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_slot_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_NODE_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_node_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_AWALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_awalk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_WALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_walk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_LOAD)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_load(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_FORK)
#define BENCH
	bench_forking();
	goto skip;
#endif
#if defined(BENCH_MT_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mt_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MAS_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mas_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MAS_PREV)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mas_prev(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif

	/* Standalone check_* tests, each on a freshly initialized tree. */
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_deficient_node(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_store_null(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_root_expand(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_iteration(&tree);
	mtree_destroy(&tree);

	check_forking();

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_mas_store_gfp(&tree);
	mtree_destroy(&tree);

	/* Test ranges (store and insert) */
	mt_init_flags(&tree, 0);
	check_ranges(&tree);
	mtree_destroy(&tree);

#if defined(CONFIG_64BIT)
	/* These tests have ranges outside of 4GB */
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_range(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_rev_range(&tree);
	mtree_destroy(&tree);
#endif

	mt_init_flags(&tree, 0);

	check_load(&tree, set[0], NULL);       /* See if 5015 -> NULL */

	check_insert(&tree, set[9], &tree);     /* Insert 0 */
	check_load(&tree, set[9], &tree);       /* See if 0 -> &tree */
	check_load(&tree, set[0], NULL);       /* See if 5015 -> NULL */

	check_insert(&tree, set[10], ptr);      /* Insert 5003 */
	check_load(&tree, set[9], &tree);       /* See if 0 -> &tree */
	check_load(&tree, set[11], NULL);       /* See if 5002 -> NULL */
	check_load(&tree, set[10], ptr);       /* See if 5003 -> ptr */

	/* Clear out the tree */
	mtree_destroy(&tree);

	/* Try to insert, insert a dup, and load back what was inserted. */
	mt_init_flags(&tree, 0);
	check_insert(&tree, set[0], &tree);     /* Insert 5015 */
	check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */

	/*
	 * Second set of tests try to load a value that doesn't exist, inserts
	 * a second value, then loads the value again
	 */
	check_load(&tree, set[1], NULL);        /* See if 5014 -> NULL */
	check_insert(&tree, set[1], ptr);       /* insert 5014 -> ptr */
	check_load(&tree, set[1], ptr);         /* See if 5014 -> ptr */
	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
	/*
	 * Tree currently contains:
	 * 5014 -> ptr and 5015 -> &tree
	 */
	check_insert(&tree, set[6], ptr);       /* insert 1002 -> ptr */
	check_insert(&tree, set[7], &tree);       /* insert 1003 -> &tree */

	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
	check_load(&tree, set[1], ptr);         /* See if 5014 -> ptr */
	check_load(&tree, set[6], ptr);         /* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);       /* 1003 = &tree ? */

	/* Clear out tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/* Test inserting into a NULL hole. */
	check_insert(&tree, set[5], ptr);       /* insert 1001 -> ptr */
	check_insert(&tree, set[7], &tree);       /* insert 1003 -> &tree */
	check_insert(&tree, set[6], ptr);       /* insert 1002 -> ptr */
	check_load(&tree, set[5], ptr);         /* See if 1001 -> ptr */
	check_load(&tree, set[6], ptr);         /* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);       /* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/*
	 *       set[] = {5015, 5014, 5017, 25, 1000,
	 *                1001, 1002, 1003, 1005, 0,
	 *                5003, 5002};
	 */

	check_insert(&tree, set[0], ptr); /* 5015 */
	check_insert(&tree, set[1], &tree); /* 5014 */
	check_insert(&tree, set[2], ptr); /* 5017 */
	check_insert(&tree, set[3], &tree); /* 25 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_insert(&tree, set[4], ptr); /* 1000 < Should split. */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree); /*25 */
	check_load(&tree, set[4], ptr);
	check_insert(&tree, set[5], &tree); /* 1001 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_insert(&tree, set[6], ptr);
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_insert(&tree, set[7], &tree);
	check_load(&tree, set[0], ptr);
	check_insert(&tree, set[8], ptr);

	check_insert(&tree, set[9], &tree);

	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_load(&tree, set[9], &tree);
	mtree_destroy(&tree);

	/* Sequential stores, forward and reverse. */
	mt_init_flags(&tree, 0);
	check_seq(&tree, 16, false);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	check_lower_bound_split(&tree);
	check_upper_bound_split(&tree);
	check_mid_split(&tree);

	mt_init_flags(&tree, 0);
	check_next_entry(&tree);
	check_find(&tree);
	check_find_2(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_prev_entry(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_gap_combining(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_node_overwrite(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	next_prev_test(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_spanning_relatives(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_find(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_fuzzer(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_bnode_min_spanning(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_window(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_fill(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_state_handling(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	alloc_cyclic_testing(&tree);
	mtree_destroy(&tree);


#if defined(BENCH)
skip:
#endif
	/* Let any RCU-deferred node frees finish before reporting. */
	rcu_barrier();
	pr_info("maple_tree: %u of %u tests passed\n",
			atomic_read(&maple_tree_tests_passed),
			atomic_read(&maple_tree_tests_run));
	if (atomic_read(&maple_tree_tests_run) ==
	    atomic_read(&maple_tree_tests_passed))
		return 0;

	return -EINVAL;
}
4016 
/*
 * maple_tree_harvest() - module exit hook.  Intentionally empty: every test
 * above destroys its own tree before returning, so there is nothing left to
 * tear down at unload time.
 */
static void __exit maple_tree_harvest(void)
{

}
4021 
/* Module glue: run the tests at load time; unload is a no-op. */
module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
MODULE_DESCRIPTION("maple tree API test module");
MODULE_LICENSE("GPL");
4027