// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
 *
 * Any tests that only require the interface of the tree.
 */

#include <linux/maple_tree.h>
#include <linux/module.h>
#include <linux/rwsem.h>

#define MTREE_ALLOC_MAX 0x2000000000000UL
#define CONFIG_MAPLE_SEARCH
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)

#ifndef CONFIG_DEBUG_MAPLE_TREE
#define mt_dump(mt, fmt)		do {} while (0)
#define mt_validate(mt)			do {} while (0)
#define mt_cache_shrink()		do {} while (0)
#define mas_dump(mas)			do {} while (0)
#define mas_wr_dump(mas)		do {} while (0)
atomic_t maple_tree_tests_run;
atomic_t maple_tree_tests_passed;
#undef MT_BUG_ON

#define MT_BUG_ON(__tree, __x) do {					\
	atomic_inc(&maple_tree_tests_run);				\
	if (__x) {							\
		pr_info("BUG at %s:%d (%u)\n",				\
		__func__, __LINE__, __x);				\
		pr_info("Pass: %u Run:%u\n",				\
			atomic_read(&maple_tree_tests_passed),		\
			atomic_read(&maple_tree_tests_run));		\
	} else {							\
		atomic_inc(&maple_tree_tests_passed);			\
	}								\
} while (0)
#endif
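
/*
 * Illustrative usage (not part of the original file): each MT_BUG_ON()
 * counts one test run; a true condition logs the failing function and
 * line, a false one counts a pass, e.g.
 *
 *	MT_BUG_ON(mt, mtree_load(mt, 0) != NULL);
 */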

/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */
/* #define BENCH_MAS_FOR_EACH */
/* #define BENCH_MAS_PREV */

#ifdef __KERNEL__
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
#define cond_resched()			do {} while (0)
#endif
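
/*
 * Thin wrappers around the public mtree_* API so each test call site
 * stays short; most of them hard-code GFP_KERNEL.
 */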
static int __init mtree_insert_index(struct maple_tree *mt,
				     unsigned long index, gfp_t gfp)
{
	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
}

static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
{
	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
}

static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
				void *ptr)
{
	return mtree_insert(mt, index, ptr, GFP_KERNEL);
}

static int __init mtree_test_store_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
}

static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
				void *ptr)
{
	return mtree_test_store_range(mt, start, start, ptr);
}

static int __init mtree_test_insert_range(struct maple_tree *mt,
			unsigned long start, unsigned long end, void *ptr)
{
	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
}

static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
{
	return mtree_load(mt, index);
}

static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{
	return mtree_erase(mt, index);
}

#if defined(CONFIG_64BIT)
static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{

	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}

static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{

	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}
#endif

static noinline void __init check_load(struct maple_tree *mt,
				       unsigned long index, void *ptr)
{
	void *ret = mtree_test_load(mt, index);

	if (ret != ptr)
		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
	MT_BUG_ON(mt, ret != ptr);
}

static noinline void __init check_store_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_store_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_insert_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert(struct maple_tree *mt,
					 unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != 0);
}

static noinline void __init check_dup_insert(struct maple_tree *mt,
				      unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != -EEXIST);
}


static noinline void __init check_index_load(struct maple_tree *mt,
					     unsigned long index)
{
	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
}

static inline __init int not_empty(struct maple_node *node)
{
	int i;

	if (node->parent)
		return 1;

	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
		if (node->slot[i])
			return 1;

	return 0;
}

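/*
 * Build a tree by inserting max down to 1, verifying after every insert
 * that all present indices load back and the index below does not.
 */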
static noinline void __init check_rev_seq(struct maple_tree *mt,
					  unsigned long max, bool verbose)
{
	unsigned long i = max, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	while (i) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = i; j <= max; j++)
			check_index_load(mt, j);

		check_load(mt, i - 1, NULL);
		mt_set_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		mt_clear_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		i--;
	}
	check_load(mt, max + 1, NULL);

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
			__func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

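/* The forward counterpart: insert 0 through max in order, verifying as we go. */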
static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	for (i = 0; i <= max; i++) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = 0; j <= i; j++)
			check_index_load(mt, j);

		if (i)
			MT_BUG_ON(mt, !mt_height(mt));
		check_load(mt, i + 1, NULL);
	}

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

static noinline void __init check_lb_not_empty(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge = 4000UL * 1000 * 1000;


	i = huge;
	while (i > 4096) {
		check_insert(mt, i, (void *) i);
		for (j = huge; j >= i; j /= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i /= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_lower_bound_split(struct maple_tree *mt)
{
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_lb_not_empty(mt);
}

static noinline void __init check_upper_bound_split(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge;

	MT_BUG_ON(mt, !mtree_empty(mt));

	if (MAPLE_32BIT)
		huge = 2147483647UL;
	else
		huge = 4000UL * 1000 * 1000;

	i = 4096;
	while (i < huge) {
		check_insert(mt, i, (void *) i);
		for (j = i; j >= 4096; j /= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i *= 2;
	}
	mtree_destroy(mt);
}

static noinline void __init check_mid_split(struct maple_tree *mt)
{
	unsigned long huge = 8000UL * 1000 * 1000;

	check_insert(mt, huge, (void *) huge);
	check_insert(mt, 0, xa_mk_value(0));
	check_lb_not_empty(mt);
}

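/*
 * Exercise mas_find_rev(): backward searches should honour the lower
 * limit and return entries in descending order.
 */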
static noinline void __init check_rev_find(struct maple_tree *mt)
{
	int i, nr_entries = 200;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	rcu_read_lock();
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 999);
	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	mas_set(&mas, 20);
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}

static noinline void __init check_find(struct maple_tree *mt)
{
	unsigned long val = 0;
	unsigned long count;
	unsigned long max;
	unsigned long top;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	/* Insert 0. */
	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;
#else
	top = ULONG_MAX;
#endif

	if (MAPLE_32BIT) {
		count = 15;
	} else {
		count = 20;
	}

	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
				XA_ZERO_ENTRY, GFP_KERNEL));

		val <<= 2;
	}

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Test mas_pause */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;

		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	val = 0;
	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		val <<= 2;
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}

	val = 0;
	max = 0;
	index = 0;
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
		max++;
		MT_BUG_ON(mt, max > 25);
	}
	mtree_erase_index(mt, ULONG_MAX);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);


	/* Range check. */
	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	val = 0;
	mas_set(&mas, 0);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
		if (!val)
			val = 1;
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	mas_set(&mas, 1048576);
	mas_lock(&mas);
	entry = mas_find(&mas, 1048576);
	mas_unlock(&mas);
	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * Find last value.
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete end entry
	 * 3. find the last value by searching for ULONG_MAX and then using
	 * prev
	 */
	/* First, get the expected result. */
	mas_lock(&mas);
	mas_reset(&mas);
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas_reset(&mas);
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	mas_erase(&mas);

	/* Get the previous value from MAS_START */
	mas_reset(&mas);
	entry2 = mas_prev(&mas, 0);

	/* Check results. */
	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);


	mas.node = MAS_NONE;
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	mas_set(&mas, 0);
	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);

	mas_unlock(&mas);
	mtree_destroy(mt);
}

static noinline void __init check_find_2(struct maple_tree *mt)
{
	unsigned long i, j;
	void *entry;

	MA_STATE(mas, mt, 0, 0);
	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		MT_BUG_ON(mt, true);
	rcu_read_unlock();

	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);
		j = 0;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != i + 1);
	}

	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);
		j = i + 1;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != 256);
	}

	/*MT_BUG_ON(mt, !mtree_empty(mt)); */
}

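/*
 * The alloc-range tests below replay a snapshot of a process address map
 * (see the "Generated by" comment) and then ask for free gaps; the
 * reverse variant searches top-down via mas_empty_area_rev() and
 * mtree_alloc_rrange().
 */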
#if defined(CONFIG_64BIT)
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	static const unsigned long holes[] = {
		/*
		 * Note: start of hole is INCLUSIVE
		 *        end of hole is EXCLUSIVE
		 *        (opposite of the above table.)
		 * Start of hole, end of hole,  size of hole (+1)
		 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. address that should be returned
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x7fff5878d << 12,  /* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x565234AF0 << 12,    /* Max */
		0x3000,         /* Size */
		0x565234AEE << 12,  /* max - 3. */
		0,              /* Return value success. */

		0x0,            /* Min */
		-1,             /* Max */
		0x1000,         /* Size */
		562949953421311 << 12,/* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7F36D5109 << 12,    /* Max */
		0x4000,         /* Size */
		0x7F36D5106 << 12,    /* First rev hole of size 0x4000 */
		0,              /* Return value success. */

		/* Ascend test. */
		0x0,
		34148798628 << 12,
		19 << 12,
		34148797418 << 12,
		0x0,

		/* Too big test. */
		0x0,
		18446744073709551615UL,
		562915594369134UL << 12,
		0x0,
		-EBUSY,

		/* Single space test. */
		34148798725 << 12,
		34148798725 << 12,
		1 << 12,
		34148798725 << 12,
		0,
	};

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */

#if DEBUG_REV_RANGE
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
				(range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}


	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_REV_RANGE
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
				min, holes[i+1]>>12, holes[i+2]>>12,
				holes[i] >> 12);
#endif
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
#if DEBUG_REV_RANGE
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
				(holes[i+1] >> 12));
#endif
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
		mas_reset(&mas);
	}

	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_REV_RANGE
		pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
				i, req_range[i] >> 12,
				(req_range[i + 1] >> 12),
				req_range[i+2] >> 12,
				req_range[i+3] >> 12);
#endif
		check_mtree_alloc_rrange(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	mtree_erase(mt, 34148798725);
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
			34148798725, 0, mt);

	mtree_destroy(mt);
}

static noinline void __init check_alloc_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps|awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};
	static const unsigned long holes[] = {
		/* Start of hole, end of hole,  size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values.
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. address that should be returned
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x565234afb000, /* First hole in our data of size 1000. */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7fff58791000, /* Max */
		0x1F00,         /* Size */
		0x0,            /* First hole in our data of size 2000. */
		0,              /* Return value success. */

		/* Test ascend. */
		34148797436 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x3000,         /* Size */
		34148798629 << 12,             /* Expected location */
		0,              /* Return value success. */

		/* Test failing. */
		34148798623 << 12,  /* Min */
		34148798683 << 12,  /* Max */
		0x15000,            /* Size */
		0,             /* Expected location */
		-EBUSY,              /* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12,  /* Min */
		0x7fff587AF000,    /* Max */
		0x10000,           /* Size */
		34148798632 << 12,             /* Expected location */
		0,              /* Return value success. */

		/* Test walking off the end of root. */
		0,                  /* Min */
		-1,                 /* Max */
		-1,                 /* Size */
		0,                  /* Expected location */
		-EBUSY,             /* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		0,                  /* Min */
		-1,                 /* Max */
		4503599618982063UL << 12,  /* Size */
		34359052178 << 12,  /* Expected location */
		-EBUSY,             /* Return failure. */

		/* Test a single entry */
		34148798648 << 12,		/* Min */
		34148798648 << 12,		/* Max */
		4096,			/* Size of 1 */
		34148798648 << 12,	/* Location is the same as min/max */
		0,			/* Success */
	};
	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
		mt_dump(mt, mt_dump_hex);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}


	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {

#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
			holes[i+1] >> 12, holes[i+2] >> 12,
			min, holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
		mas_reset(&mas);
	}
	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
			 i/5, req_range[i]   >> 12, req_range[i + 1]   >> 12,
			 req_range[i + 2]   >> 12, req_range[i + 3]   >> 12,
			 req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
#if DEBUG_ALLOC_RANGE
		mt_dump(mt, mt_dump_hex);
#endif
	}

	mtree_destroy(mt);
}
#endif

static noinline void __init check_ranges(struct maple_tree *mt)
{
	int i, val, val2;
	static const unsigned long r[] = {
		10, 15,
		20, 25,
		17, 22, /* Overlaps previous range. */
		9, 1000, /* Huge. */
		100, 200,
		45, 168,
		118, 128,
	};

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
	MT_BUG_ON(mt, !mt_height(mt));
	/* Store */
	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	MT_BUG_ON(mt, mt_height(mt));

	check_seq(mt, 50, false);
	mt_set_non_kernel(4);
	check_store_range(mt, 5, 47, xa_mk_value(47), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 0-100 */
	check_seq(mt, 100, false);
	/* Store 45-168 */
	mt_set_non_kernel(10);
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 0-200 */
	check_seq(mt, 200, false);
	/* Store 45-168 */
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	check_seq(mt, 30, false);
	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite across multiple levels. */
	/* Create tree of 0-400 */
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	/* Store 118-128 */
	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
	mt_set_non_kernel(50);
	mtree_test_erase(mt, 140);
	mtree_test_erase(mt, 141);
	mtree_test_erase(mt, 142);
	mtree_test_erase(mt, 143);
	mtree_test_erase(mt, 130);
	mtree_test_erase(mt, 131);
	mtree_test_erase(mt, 132);
	mtree_test_erase(mt, 133);
	mtree_test_erase(mt, 134);
	mtree_test_erase(mt, 135);
	check_load(mt, r[12], xa_mk_value(r[12]));
	check_load(mt, r[13], xa_mk_value(r[12]));
	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
	check_load(mt, 135, NULL);
	check_load(mt, 140, NULL);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);



	/* Overwrite multiple levels at the end of the tree (slot 7) */
	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	check_store_range(mt, 347, 352, xa_mk_value(347), 0);

	check_load(mt, 346, xa_mk_value(346));
	for (i = 347; i <= 352; i++)
		check_load(mt, i, xa_mk_value(347));
	for (i = 353; i <= 361; i++)
		check_load(mt, i, xa_mk_value(353));
	check_load(mt, 362, xa_mk_value(362));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 363; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 364, NULL);
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(5);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 364; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);


	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	mt_set_non_kernel(0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	/*
	 * Interesting cases:
	 * 1. Overwrite the end of a node and end in the first entry of the next
	 * node.
	 * 2. Split a single range
	 * 3. Overwrite the start of a range
	 * 4. Overwrite the end of a range
	 * 5. Overwrite the entire range
	 * 6. Overwrite a range that causes multiple parent nodes to be
	 * combined
	 * 7. Overwrite a range that causes multiple parent nodes and part of
	 * root to be combined
	 * 8. Overwrite the whole tree
	 * 9. Try to overwrite the zero entry of an alloc tree.
	 * 10. Write a range larger than a node's current pivot
	 */

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
	check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
	check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
	check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
	check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
	mtree_destroy(mt);
	mt_set_non_kernel(0);

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
	check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
	check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
	check_store_range(mt, 2460, 2470, NULL, 0);
	check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
	check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Check in-place modifications */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	/* Append to the start of last range */
	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i * 5 + 1;
		val2 = val + 4;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/* Append to the last range without touching any boundaries */
	for (i = 0; i < 10; i++) {
		val = val2 + 5;
		val2 = val + 4;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/* Append to the end of last range */
	val = val2;
	for (i = 0; i < 10; i++) {
		val += 5;
		MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
						     xa_mk_value(val)) != 0);
	}

	/* Overwriting the range and over a part of the next range */
	for (i = 10; i < 30; i += 2) {
		val = i * 5 + 1;
		val2 = val + 5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/* Overwriting a part of the range and over the next range */
	for (i = 50; i < 70; i += 2) {
		val2 = i * 5;
		val = val2 - 5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/*
	 * Expand the range, only partially overwriting the previous and
	 * next ranges
	 */
	for (i = 100; i < 130; i += 3) {
		val = i * 5 - 5;
		val2 = i * 5 + 1;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	/*
	 * Expand the range, only partially overwriting the previous and
	 * next ranges, in RCU mode
	 */
	mt_set_in_rcu(mt);
	for (i = 150; i < 180; i += 3) {
		val = i * 5 - 5;
		val2 = i * 5 + 1;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}

	MT_BUG_ON(mt, !mt_height(mt));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	/* Test rebalance gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mt_set_non_kernel(50);
	for (i = 0; i <= 50; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 161, 161, xa_mk_value(161), 0);
	check_store_range(mt, 162, 162, xa_mk_value(162), 0);
	check_store_range(mt, 163, 163, xa_mk_value(163), 0);
	check_store_range(mt, 240, 249, NULL, 0);
	mtree_erase(mt, 200);
	mtree_erase(mt, 210);
	mtree_erase(mt, 220);
	mtree_erase(mt, 230);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
	check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
	check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
	check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
	check_store_range(mt, 4842, 4849, NULL, 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1300; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Cause a 3-child split all the way up the tree. */
	for (i = 5; i < 215; i += 10)
		check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
	for (i = 5; i < 65; i += 10)
		check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);

	MT_BUG_ON(mt, mt_height(mt) >= 4);
	for (i = 5; i < 45; i += 10)
		check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) < 4);
	mtree_destroy(mt);


	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1200; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Fill parents and leaves before split. */
	for (i = 5; i < 455; i += 10)
		check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);

	for (i = 1; i < 16; i++)
		check_store_range(mt, 8185 + i, 8185 + i + 1,
				  xa_mk_value(8185+i), 0);
	MT_BUG_ON(mt, mt_height(mt) >= 4);
	/* Triple split across multiple levels. */
	check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) != 4);
}

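/* Walk forward with mas_next(), including one step past the limit. */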
static noinline void __init check_next_entry(struct maple_tree *mt)
{
	void *entry = NULL;
	unsigned long limit = 30, i = 0;
	MA_STATE(mas, mt, i, i);

	MT_BUG_ON(mt, !mtree_empty(mt));

	check_seq(mt, limit, false);
	rcu_read_lock();

	/* Check the first one and get ma_state in the correct state. */
	MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
	for ( ; i <= limit + 1; i++) {
		entry = mas_next(&mas, limit);
		if (i > limit)
			MT_BUG_ON(mt, entry != NULL);
		else
			MT_BUG_ON(mt, xa_mk_value(i) != entry);
	}
	rcu_read_unlock();
	mtree_destroy(mt);
}

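/* mas_prev() must stop at its lower limit and return NULL beyond it. */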
static noinline void __init check_prev_entry(struct maple_tree *mt)
{
	unsigned long index = 16;
	void *value;
	int i;

	MA_STATE(mas, mt, index, index);

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, 30, false);

	rcu_read_lock();
	value = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, value != xa_mk_value(index));
	value = mas_prev(&mas, 0);
	MT_BUG_ON(mt, value != xa_mk_value(index - 1));
	rcu_read_unlock();
	mtree_destroy(mt);

	/* Check limits on prev */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	for (i = 0; i <= index; i++) {
		mas_set_range(&mas, i*10, i*10+5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	mas_set(&mas, 20);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(2));

	value = mas_prev(&mas, 19);
	MT_BUG_ON(mt, value != NULL);

	mas_set(&mas, 80);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(8));

	value = mas_prev(&mas, 76);
	MT_BUG_ON(mt, value != NULL);

	mas_unlock(&mas);
}

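/*
 * Root-node expansion: single-entry trees, including pointers with low
 * tag bits set (| 1UL, | 2UL) that must round-trip unchanged.
 */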
static noinline void __init check_root_expand(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *ptr;


	mas_lock(&mas);
	mas_set(&mas, 3);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	ptr = &check_prev_entry;
	mas_set(&mas, 1);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 0);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	mas_set(&mas, 1);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);

	mas_set(&mas, 2);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	mas_unlock(&mas);
	mtree_destroy(mt);


	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = &check_prev_entry;
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 5);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	mas_set_range(&mas, 0, 100);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);
	MT_BUG_ON(mt, mas.last != 0);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 1UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));

	mas_unlock(&mas);

	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);
	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 2UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));


	mas_unlock(&mas);
}

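/*
 * Erase entries so that adjacent gaps must be combined, then verify
 * mas_empty_area_rev() finds the combined gap; the seq2000 and seq400
 * arrays drive the multi-level and rebalance variants.
 */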
static noinline void __init check_gap_combining(struct maple_tree *mt)
{
	struct maple_enode *mn1, *mn2;
	void *entry;
	unsigned long singletons = 100;
	static const unsigned long *seq100;
	static const unsigned long seq100_64[] = {
		/* 0-5 */
		74, 75, 76,
		50, 100, 2,

		/* 6-12 */
		44, 45, 46, 43,
		20, 50, 3,

		/* 13-20 */
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq100_32[] = {
		/* 0-5 */
		61, 62, 63,
		50, 100, 2,

		/* 6-12 */
		31, 32, 33, 30,
		20, 50, 3,

		/* 13-20 */
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq2000[] = {
		1152, 1151,
		1100, 1200, 2,
	};
	static const unsigned long seq400[] = {
		286, 318,
		256, 260, 266, 270, 275, 280, 290, 398,
		286, 310,
	};

	unsigned long index;

	MA_STATE(mas, mt, 0, 0);

	if (MAPLE_32BIT)
		seq100 = seq100_32;
	else
		seq100 = seq100_64;

	index = seq100[0];
	mas_set(&mas, index);
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, singletons, false); /* create 100 singletons. */

	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq100[2]);
	check_load(mt, seq100[2], NULL);
	mtree_test_erase(mt, seq100[1]);
	check_load(mt, seq100[1], NULL);

	rcu_read_lock();
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	mas_next(&mas, ULONG_MAX);
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the test. */

	/*
	 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
	 * seq100[4]. Search for the gap.
	 */
	mt_set_non_kernel(1);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
					     seq100[5]));
	MT_BUG_ON(mt, mas.index != index + 1);
	rcu_read_unlock();

	mtree_test_erase(mt, seq100[6]);
	check_load(mt, seq100[6], NULL);
	mtree_test_erase(mt, seq100[7]);
	check_load(mt, seq100[7], NULL);
	mtree_test_erase(mt, seq100[8]);
	index = seq100[9];

	rcu_read_lock();
	mas.index = index;
	mas.last = index;
	mas_reset(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mas_next(&mas, ULONG_MAX); /* go to the next entry. */
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */

	/*
	 * At this point, there is a gap of 3 at seq100[6].  Find it by
	 * searching 20 - 50 for size 3.
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
					     seq100[12]));
	MT_BUG_ON(mt, mas.index != seq100[6]);
	rcu_read_unlock();

	mt_set_non_kernel(1);
	mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], xa_mk_value(seq100[14]));
	mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], NULL);

	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
					     seq100[17]));
	MT_BUG_ON(mt, mas.index != seq100[13]);
	mt_validate(mt);
	rcu_read_unlock();

	/*
	 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
	 * gap.
	 */
	mt_set_non_kernel(2);
	mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
	mtree_test_erase(mt, seq100[15]);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
					     seq100[20]));
	rcu_read_unlock();
	MT_BUG_ON(mt, mas.index != seq100[18]);
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 2000 tests are for multi-level tree gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 2000, false);
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq2000[0]);
	mtree_test_erase(mt, seq2000[1]);

	mt_set_non_kernel(2);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
					     seq2000[4]));
	MT_BUG_ON(mt, mas.index != seq2000[1]);
	rcu_read_unlock();
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 400 tests rebalancing over two levels. */
	mt_set_non_kernel(99);
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	mtree_test_store_range(mt, seq400[2], seq400[9],
			       xa_mk_value(seq400[2]));
	mtree_test_store_range(mt, seq400[3], seq400[9],
			       xa_mk_value(seq400[3]));
	mtree_test_store_range(mt, seq400[4], seq400[9],
			       xa_mk_value(seq400[4]));
	mtree_test_store_range(mt, seq400[5], seq400[9],
			       xa_mk_value(seq400[5]));
	mtree_test_store_range(mt, seq400[0], seq400[9],
			       xa_mk_value(seq400[0]));
	mtree_test_store_range(mt, seq400[6], seq400[9],
			       xa_mk_value(seq400[6]));
	mtree_test_store_range(mt, seq400[7], seq400[9],
			       xa_mk_value(seq400[7]));
	mtree_test_store_range(mt, seq400[8], seq400[9],
			       xa_mk_value(seq400[8]));
	mtree_test_store_range(mt, seq400[10], seq400[11],
			       xa_mk_value(seq400[10]));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);
}

static noinline void __init check_node_overwrite(struct maple_tree *mt)
{
	int i, max = 4000;

	for (i = 0; i < max; i++)
		mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));

	mtree_test_store_range(mt, 319951, 367950, NULL);
	/*mt_dump(mt, mt_dump_dec); */
	mt_validate(mt);
}

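/* Optional micro-benchmarks; enable via the BENCH_* defines above. */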
#if defined(BENCH_SLOT_STORE)
static noinline void __init bench_slot_store(struct maple_tree *mt)
{
	int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
		mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
				  GFP_KERNEL);
	}
}
#endif

#if defined(BENCH_NODE_STORE)
static noinline void __init bench_node_store(struct maple_tree *mt)
{
	int i, overwrite = 76, max = 240, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, overwrite, overwrite + 15,
				  xa_mk_value(overwrite), GFP_KERNEL);

		overwrite += 5;
		if (overwrite >= 135)
			overwrite = 76;
	}
}
#endif

#if defined(BENCH_AWALK)
static noinline void __init bench_awalk(struct maple_tree *mt)
{
	int i, max = 2500, count = 50000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_empty_area_rev(&mas, 0, 2000, 10);
		mas_reset(&mas);
	}
}
#endif

#if defined(BENCH_WALK)
static noinline void __init bench_walk(struct maple_tree *mt)
{
	int i, max = 2500, count = 550000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_walk(&mas);
		mas_reset(&mas);
	}
}
#endif

#if defined(BENCH_MT_FOR_EACH)
static noinline void __init bench_mt_for_each(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500, index = 0;
	void *entry;

	for (i = 0; i < max; i += 5)
		mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		unsigned long j = 0;

		mt_for_each(mt, entry, index, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j += 5;
		}

		index = 0;
	}
}
#endif

#if defined(BENCH_MAS_FOR_EACH)
static noinline void __init bench_mas_for_each(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500;
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i < max; i += 5) {
		int gap = 4;

		if (i % 30 == 0)
			gap = 3;
		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
	}

	rcu_read_lock();
	for (i = 0; i < count; i++) {
		unsigned long j = 0;

		mas_for_each(&mas, entry, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j += 5;
		}
		mas_set(&mas, 0);
	}
	rcu_read_unlock();
}
#endif

#if defined(BENCH_MAS_PREV)
static noinline void __init bench_mas_prev(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500;
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i < max; i += 5) {
		int gap = 4;

		if (i % 30 == 0)
			gap = 3;
		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
	}

	rcu_read_lock();
	for (i = 0; i < count; i++) {
		unsigned long j = 2495;

		mas_set(&mas, ULONG_MAX);
		while ((entry = mas_prev(&mas, 0)) != NULL) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j -= 5;
		}
	}
	rcu_read_unlock();
}
#endif

1836 /* check_forking - simulate the kernel forking sequence with the tree. */
check_forking(void)1837 static noinline void __init check_forking(void)
1838 {
1839 	struct maple_tree mt, newmt;
1840 	int i, nr_entries = 134, ret;
1841 	void *val;
1842 	MA_STATE(mas, &mt, 0, 0);
1843 	MA_STATE(newmas, &newmt, 0, 0);
1844 	struct rw_semaphore mt_lock, newmt_lock;
1845 
1846 	init_rwsem(&mt_lock);
1847 	init_rwsem(&newmt_lock);
1848 
1849 	mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
1850 	mt_set_external_lock(&mt, &mt_lock);
1851 
1852 	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
1853 	mt_set_external_lock(&newmt, &newmt_lock);
1854 
1855 	down_write(&mt_lock);
1856 	for (i = 0; i <= nr_entries; i++) {
1857 		mas_set_range(&mas, i*10, i*10 + 5);
1858 		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
1859 	}
1860 
1861 	down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
1862 	ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
1863 	if (ret) {
1864 			pr_err("OOM!\n");
1865 		BUG_ON(1);
1866 	}
1867 
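	/* Store to each duplicated entry, as fork does for each new VMA. */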
1868 	mas_set(&newmas, 0);
1869 	mas_for_each(&newmas, val, ULONG_MAX)
1870 		mas_store(&newmas, val);
1871 
1872 	mas_destroy(&newmas);
1873 	mas_destroy(&mas);
1874 	mt_validate(&newmt);
1875 	__mt_destroy(&newmt);
1876 	__mt_destroy(&mt);
1877 	up_write(&newmt_lock);
1878 	up_write(&mt_lock);
1879 }
1880 
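/*
 * check_iteration() - Ensure mas_for_each() keeps its place when the entry
 * under the iterator is overwritten mid-loop, whether the store clips the
 * start or the end of the current range.
 */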
1881 static noinline void __init check_iteration(struct maple_tree *mt)
1882 {
1883 	int i, nr_entries = 125;
1884 	void *val;
1885 	MA_STATE(mas, mt, 0, 0);
1886 
1887 	for (i = 0; i <= nr_entries; i++)
1888 		mtree_store_range(mt, i * 10, i * 10 + 9,
1889 				  xa_mk_value(i), GFP_KERNEL);
1890 
1891 	mt_set_non_kernel(99999);
1892 
1893 	i = 0;
1894 	mas_lock(&mas);
1895 	mas_for_each(&mas, val, 925) {
1896 		MT_BUG_ON(mt, mas.index != i * 10);
1897 		MT_BUG_ON(mt, mas.last != i * 10 + 9);
1898 		/* Overwrite end of entry 92 */
1899 		if (i == 92) {
1900 			mas.index = 925;
1901 			mas.last = 929;
1902 			mas_store(&mas, val);
1903 		}
1904 		i++;
1905 	}
1906 	/* Ensure mas_find() gets the next value */
1907 	val = mas_find(&mas, ULONG_MAX);
1908 	MT_BUG_ON(mt, val != xa_mk_value(i));
1909 
1910 	mas_set(&mas, 0);
1911 	i = 0;
1912 	mas_for_each(&mas, val, 785) {
1913 		MT_BUG_ON(mt, mas.index != i * 10);
1914 		MT_BUG_ON(mt, mas.last != i * 10 + 9);
1915 		/* Overwrite start of entry 78 */
1916 		if (i == 78) {
1917 			mas.index = 780;
1918 			mas.last = 785;
1919 			mas_store(&mas, val);
1920 		} else {
1921 			i++;
1922 		}
1923 	}
1924 	val = mas_find(&mas, ULONG_MAX);
1925 	MT_BUG_ON(mt, val != xa_mk_value(i));
1926 
1927 	mas_set(&mas, 0);
1928 	i = 0;
1929 	mas_for_each(&mas, val, 765) {
1930 		MT_BUG_ON(mt, mas.index != i * 10);
1931 		MT_BUG_ON(mt, mas.last != i * 10 + 9);
1932 		/* Overwrite end of entry 76 and advance to the end */
1933 		if (i == 76) {
1934 			mas.index = 760;
1935 			mas.last = 765;
1936 			mas_store(&mas, val);
1937 		}
1938 		i++;
1939 	}
1940 	/* Make sure the next find returns the range after 765, i.e. 766-769 */
1941 	val = mas_find(&mas, ULONG_MAX);
1942 	MT_BUG_ON(mt, val != xa_mk_value(76));
1943 	mas_unlock(&mas);
1944 	mas_destroy(&mas);
1945 	mt_set_non_kernel(0);
1946 }
1947 
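/* Copy a tree entry by entry into a fresh tree using mas_store_gfp(). */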
1948 static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
1949 {
1951 	struct maple_tree newmt;
1952 	int i, nr_entries = 135;
1953 	void *val;
1954 	MA_STATE(mas, mt, 0, 0);
1955 	MA_STATE(newmas, mt, 0, 0);
1956 
1957 	for (i = 0; i <= nr_entries; i++)
1958 		mtree_store_range(mt, i*10, i*10 + 5,
1959 				  xa_mk_value(i), GFP_KERNEL);
1960 
1961 	mt_set_non_kernel(99999);
1962 	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
1963 	newmas.tree = &newmt;
1964 	rcu_read_lock();
1965 	mas_lock(&newmas);
1966 	mas_reset(&newmas);
1967 	mas_set(&mas, 0);
1968 	mas_for_each(&mas, val, ULONG_MAX) {
1969 		newmas.index = mas.index;
1970 		newmas.last = mas.last;
1971 		mas_store_gfp(&newmas, val, GFP_KERNEL);
1972 	}
1973 	mas_unlock(&newmas);
1974 	rcu_read_unlock();
1975 	mt_validate(&newmt);
1976 	mt_set_non_kernel(0);
1977 	mtree_destroy(&newmt);
1978 }
1979 
1980 #if defined(BENCH_FORK)
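/*
 * bench_forking() - Time the fork sequence: duplicate the source tree with
 * __mt_dup(), store to every entry, then tear the copy down, 80000 times.
 */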
1981 static noinline void __init bench_forking(void)
1982 {
1983 	struct maple_tree mt, newmt;
1984 	int i, nr_entries = 134, nr_fork = 80000, ret;
1985 	void *val;
1986 	MA_STATE(mas, &mt, 0, 0);
1987 	MA_STATE(newmas, &newmt, 0, 0);
1988 	struct rw_semaphore mt_lock, newmt_lock;
1989 
1990 	init_rwsem(&mt_lock);
1991 	init_rwsem(&newmt_lock);
1992 
1993 	mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
1994 	mt_set_external_lock(&mt, &mt_lock);
1995 
1996 	down_write(&mt_lock);
1997 	for (i = 0; i <= nr_entries; i++) {
1998 		mas_set_range(&mas, i*10, i*10 + 5);
1999 		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
2000 	}
2001 
2002 	for (i = 0; i < nr_fork; i++) {
2003 		mt_init_flags(&newmt,
2004 			      MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2005 		mt_set_external_lock(&newmt, &newmt_lock);
2006 
2007 		down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
2008 		ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
2009 		if (ret) {
2010 			pr_err("OOM!\n");
2011 			BUG_ON(1);
2012 		}
2013 
2014 		mas_set(&newmas, 0);
2015 		mas_for_each(&newmas, val, ULONG_MAX)
2016 			mas_store(&newmas, val);
2017 
2018 		mas_destroy(&newmas);
2019 		mt_validate(&newmt);
2020 		__mt_destroy(&newmt);
2021 		up_write(&newmt_lock);
2022 	}
2023 	mas_destroy(&mas);
2024 	__mt_destroy(&mt);
2025 	up_write(&mt_lock);
2026 }
2027 #endif
2028 
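/*
 * next_prev_test() - Exercise mas_next()/mas_prev() within a node, across
 * node boundaries and levels, and off either end of the tree.
 */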
2029 static noinline void __init next_prev_test(struct maple_tree *mt)
2030 {
2031 	int i, nr_entries;
2032 	void *val;
2033 	MA_STATE(mas, mt, 0, 0);
2034 	struct maple_enode *mn;
2035 	static const unsigned long *level2;
2036 	static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
2037 						   725};
2038 	static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
2039 						   1760, 1765};
2040 	unsigned long last_index;
2041 
2042 	if (MAPLE_32BIT) {
2043 		nr_entries = 500;
2044 		level2 = level2_32;
2045 		last_index = 0x138e;
2046 	} else {
2047 		nr_entries = 200;
2048 		level2 = level2_64;
2049 		last_index = 0x7d6;
2050 	}
2051 
2052 	for (i = 0; i <= nr_entries; i++)
2053 		mtree_store_range(mt, i*10, i*10 + 5,
2054 				  xa_mk_value(i), GFP_KERNEL);
2055 
2056 	mas_lock(&mas);
2057 	for (i = 0; i <= nr_entries / 2; i++) {
2058 		mas_next(&mas, 1000);
2059 		if (mas_is_none(&mas))
2060 			break;
2061 
2062 	}
2063 	mas_reset(&mas);
2064 	mas_set(&mas, 0);
2065 	i = 0;
2066 	mas_for_each(&mas, val, 1000) {
2067 		i++;
2068 	}
2069 
2070 	mas_reset(&mas);
2071 	mas_set(&mas, 0);
2072 	i = 0;
2073 	mas_for_each(&mas, val, 1000) {
2074 		mas_pause(&mas);
2075 		i++;
2076 	}
2077 
2078 	/*
2079 	 * 680 - 685 = 0x61a00001930c
2080 	 * 686 - 689 = NULL;
2081 	 * 690 - 695 = 0x61a00001930c
2082 	 * Check simple next/prev
2083 	 */
2084 	mas_set(&mas, 686);
2085 	val = mas_walk(&mas);
2086 	MT_BUG_ON(mt, val != NULL);
2087 
2088 	val = mas_next(&mas, 1000);
2089 	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2090 	MT_BUG_ON(mt, mas.index != 690);
2091 	MT_BUG_ON(mt, mas.last != 695);
2092 
2093 	val = mas_prev(&mas, 0);
2094 	MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
2095 	MT_BUG_ON(mt, mas.index != 680);
2096 	MT_BUG_ON(mt, mas.last != 685);
2097 
2098 	val = mas_next(&mas, 1000);
2099 	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2100 	MT_BUG_ON(mt, mas.index != 690);
2101 	MT_BUG_ON(mt, mas.last != 695);
2102 
2103 	val = mas_next(&mas, 1000);
2104 	MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
2105 	MT_BUG_ON(mt, mas.index != 700);
2106 	MT_BUG_ON(mt, mas.last != 705);
2107 
2108 	/* Check across node boundaries of the tree */
2109 	mas_set(&mas, 70);
2110 	val = mas_walk(&mas);
2111 	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2112 	MT_BUG_ON(mt, mas.index != 70);
2113 	MT_BUG_ON(mt, mas.last != 75);
2114 
2115 	val = mas_next(&mas, 1000);
2116 	MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
2117 	MT_BUG_ON(mt, mas.index != 80);
2118 	MT_BUG_ON(mt, mas.last != 85);
2119 
2120 	val = mas_prev(&mas, 70);
2121 	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2122 	MT_BUG_ON(mt, mas.index != 70);
2123 	MT_BUG_ON(mt, mas.last != 75);
2124 
2125 	/* Check across two levels of the tree */
2126 	mas_reset(&mas);
2127 	mas_set(&mas, level2[0]);
2128 	val = mas_walk(&mas);
2129 	MT_BUG_ON(mt, val != NULL);
2130 	val = mas_next(&mas, level2[1]);
2131 	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2132 	MT_BUG_ON(mt, mas.index != level2[2]);
2133 	MT_BUG_ON(mt, mas.last != level2[3]);
2134 	mn = mas.node;
2135 
2136 	val = mas_next(&mas, level2[1]);
2137 	MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
2138 	MT_BUG_ON(mt, mas.index != level2[4]);
2139 	MT_BUG_ON(mt, mas.last != level2[5]);
2140 	MT_BUG_ON(mt, mn == mas.node);
2141 
2142 	val = mas_prev(&mas, 0);
2143 	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2144 	MT_BUG_ON(mt, mas.index != level2[2]);
2145 	MT_BUG_ON(mt, mas.last != level2[3]);
2146 
2147 	/* Check running off the end and back on */
2148 	mas_set(&mas, nr_entries * 10);
2149 	val = mas_walk(&mas);
2150 	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2151 	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2152 	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2153 
2154 	val = mas_next(&mas, ULONG_MAX);
2155 	MT_BUG_ON(mt, val != NULL);
2156 	MT_BUG_ON(mt, mas.index != last_index);
2157 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
2158 
2159 	val = mas_prev(&mas, 0);
2160 	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2161 	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2162 	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2163 
2164 	/* Check running off the start and back on */
2165 	mas_reset(&mas);
2166 	mas_set(&mas, 10);
2167 	val = mas_walk(&mas);
2168 	MT_BUG_ON(mt, val != xa_mk_value(1));
2169 	MT_BUG_ON(mt, mas.index != 10);
2170 	MT_BUG_ON(mt, mas.last != 15);
2171 
2172 	val = mas_prev(&mas, 0);
2173 	MT_BUG_ON(mt, val != xa_mk_value(0));
2174 	MT_BUG_ON(mt, mas.index != 0);
2175 	MT_BUG_ON(mt, mas.last != 5);
2176 
2177 	val = mas_prev(&mas, 0);
2178 	MT_BUG_ON(mt, val != NULL);
2179 	MT_BUG_ON(mt, mas.index != 0);
2180 	MT_BUG_ON(mt, mas.last != 5);
2181 	MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
2182 
2183 	mas.index = 0;
2184 	mas.last = 5;
2185 	mas_store(&mas, NULL);
2186 	mas_reset(&mas);
2187 	mas_set(&mas, 10);
2188 	mas_walk(&mas);
2189 
2190 	val = mas_prev(&mas, 0);
2191 	MT_BUG_ON(mt, val != NULL);
2192 	MT_BUG_ON(mt, mas.index != 0);
2193 	MT_BUG_ON(mt, mas.last != 9);
2194 	mas_unlock(&mas);
2195 
2196 	mtree_destroy(mt);
2197 
2198 	mt_init(mt);
2199 	mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
2200 	mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
2201 	rcu_read_lock();
2202 	mas_set(&mas, 5);
2203 	val = mas_prev(&mas, 4);
2204 	MT_BUG_ON(mt, val != NULL);
2205 	rcu_read_unlock();
2206 }
2207 
2210 /* Test spanning writes that require balancing the right sibling or cousin */
2211 static noinline void __init check_spanning_relatives(struct maple_tree *mt)
2212 {
2214 	unsigned long i, nr_entries = 1000;
2215 
2216 	for (i = 0; i <= nr_entries; i++)
2217 		mtree_store_range(mt, i*10, i*10 + 5,
2218 				  xa_mk_value(i), GFP_KERNEL);
2219 
2221 	mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
2222 }
2223 
2224 static noinline void __init check_fuzzer(struct maple_tree *mt)
2225 {
2226 	/*
2227 	 * 1. Causes a spanning rebalance of a single root node.
2228 	 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
2229 	 * entire right side is consumed.
2230 	 */
2231 	mtree_test_insert(mt, 88, (void *)0xb1);
2232 	mtree_test_insert(mt, 84, (void *)0xa9);
2233 	mtree_test_insert(mt, 2,  (void *)0x5);
2234 	mtree_test_insert(mt, 4,  (void *)0x9);
2235 	mtree_test_insert(mt, 14, (void *)0x1d);
2236 	mtree_test_insert(mt, 7,  (void *)0xf);
2237 	mtree_test_insert(mt, 12, (void *)0x19);
2238 	mtree_test_insert(mt, 18, (void *)0x25);
2239 	mtree_test_store_range(mt, 8, 18, (void *)0x11);
2240 	mtree_destroy(mt);
2241 
2243 	/*
2244 	 * 2. Causes a spanning rebalance of two nodes in the root.
2245 	 * Fixed by setting mast->r->max correctly.
2246 	 */
2247 	mt_init_flags(mt, 0);
2248 	mtree_test_store(mt, 87, (void *)0xaf);
2249 	mtree_test_store(mt, 0, (void *)0x1);
2250 	mtree_test_load(mt, 4);
2251 	mtree_test_insert(mt, 4, (void *)0x9);
2252 	mtree_test_store(mt, 8, (void *)0x11);
2253 	mtree_test_store(mt, 44, (void *)0x59);
2254 	mtree_test_store(mt, 68, (void *)0x89);
2255 	mtree_test_store(mt, 2, (void *)0x5);
2256 	mtree_test_insert(mt, 43, (void *)0x57);
2257 	mtree_test_insert(mt, 24, (void *)0x31);
2258 	mtree_test_insert(mt, 844, (void *)0x699);
2259 	mtree_test_store(mt, 84, (void *)0xa9);
2260 	mtree_test_store(mt, 4, (void *)0x9);
2261 	mtree_test_erase(mt, 4);
2262 	mtree_test_load(mt, 5);
2263 	mtree_test_erase(mt, 0);
2264 	mtree_destroy(mt);
2265 
2266 	/*
2267 	 * 3. Causes a node overflow on copy.
2268 	 * Fixed by using the correct check for node size in mas_wr_modify().
2269 	 * Also discovered an issue with metadata setting.
2270 	 */
2271 	mt_init_flags(mt, 0);
2272 	mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
2273 	mtree_test_store(mt, 4, (void *)0x9);
2274 	mtree_test_erase(mt, 5);
2275 	mtree_test_erase(mt, 0);
2276 	mtree_test_erase(mt, 4);
2277 	mtree_test_store(mt, 5, (void *)0xb);
2278 	mtree_test_erase(mt, 5);
2279 	mtree_test_store(mt, 5, (void *)0xb);
2280 	mtree_test_erase(mt, 5);
2281 	mtree_test_erase(mt, 4);
2282 	mtree_test_store(mt, 4, (void *)0x9);
2283 	mtree_test_store(mt, 444, (void *)0x379);
2284 	mtree_test_store(mt, 0, (void *)0x1);
2285 	mtree_test_load(mt, 0);
2286 	mtree_test_store(mt, 5, (void *)0xb);
2287 	mtree_test_erase(mt, 0);
2288 	mtree_destroy(mt);
2289 
2290 	/*
2291 	 * 4. Spanning store failure due to writing an incorrect pivot value at
2292 	 * the last slot.
2293 	 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes().
2295 	 */
2296 	mt_init_flags(mt, 0);
2297 	mtree_test_insert(mt, 261, (void *)0x20b);
2298 	mtree_test_store(mt, 516, (void *)0x409);
2299 	mtree_test_store(mt, 6, (void *)0xd);
2300 	mtree_test_insert(mt, 5, (void *)0xb);
2301 	mtree_test_insert(mt, 1256, (void *)0x9d1);
2302 	mtree_test_store(mt, 4, (void *)0x9);
2303 	mtree_test_erase(mt, 1);
2304 	mtree_test_store(mt, 56, (void *)0x71);
2305 	mtree_test_insert(mt, 1, (void *)0x3);
2306 	mtree_test_store(mt, 24, (void *)0x31);
2307 	mtree_test_erase(mt, 1);
2308 	mtree_test_insert(mt, 2263, (void *)0x11af);
2309 	mtree_test_insert(mt, 446, (void *)0x37d);
2310 	mtree_test_store_range(mt, 6, 45, (void *)0xd);
2311 	mtree_test_store_range(mt, 3, 446, (void *)0x7);
2312 	mtree_destroy(mt);
2313 
2314 	/*
2315 	 * 5. mas_wr_extend_null() may overflow slots.
2316 	 * Fix by checking against wr_mas->node_end.
2317 	 */
2318 	mt_init_flags(mt, 0);
2319 	mtree_test_store(mt, 48, (void *)0x61);
2320 	mtree_test_store(mt, 3, (void *)0x7);
2321 	mtree_test_load(mt, 0);
2322 	mtree_test_store(mt, 88, (void *)0xb1);
2323 	mtree_test_store(mt, 81, (void *)0xa3);
2324 	mtree_test_insert(mt, 0, (void *)0x1);
2325 	mtree_test_insert(mt, 8, (void *)0x11);
2326 	mtree_test_insert(mt, 4, (void *)0x9);
2327 	mtree_test_insert(mt, 2480, (void *)0x1361);
2328 	mtree_test_insert(mt, ULONG_MAX,
2329 			  (void *)0xffffffffffffffff);
2330 	mtree_test_erase(mt, ULONG_MAX);
2331 	mtree_destroy(mt);
2332 
2333 	/*
2334 	 * 6. When reusing a node with an implied pivot and the node is
2335 	 * shrinking, old data would be left in the implied slot.
2336 	 * Fixed by checking the last pivot against mas->max and clearing
2337 	 * accordingly.  This only affected the left-most node as that node is
2338 	 * the only one allowed to end in NULL.
2339 	 */
2340 	mt_init_flags(mt, 0);
2341 	mtree_test_erase(mt, 3);
2342 	mtree_test_insert(mt, 22, (void *)0x2d);
2343 	mtree_test_insert(mt, 15, (void *)0x1f);
2344 	mtree_test_load(mt, 2);
2345 	mtree_test_insert(mt, 1, (void *)0x3);
2346 	mtree_test_insert(mt, 1, (void *)0x3);
2347 	mtree_test_insert(mt, 5, (void *)0xb);
2348 	mtree_test_erase(mt, 1);
2349 	mtree_test_insert(mt, 1, (void *)0x3);
2350 	mtree_test_insert(mt, 4, (void *)0x9);
2351 	mtree_test_insert(mt, 1, (void *)0x3);
2352 	mtree_test_erase(mt, 1);
2353 	mtree_test_insert(mt, 2, (void *)0x5);
2354 	mtree_test_insert(mt, 1, (void *)0x3);
2355 	mtree_test_erase(mt, 3);
2356 	mtree_test_insert(mt, 22, (void *)0x2d);
2357 	mtree_test_insert(mt, 15, (void *)0x1f);
2358 	mtree_test_insert(mt, 2, (void *)0x5);
2359 	mtree_test_insert(mt, 1, (void *)0x3);
2360 	mtree_test_insert(mt, 8, (void *)0x11);
2361 	mtree_test_load(mt, 2);
2362 	mtree_test_insert(mt, 1, (void *)0x3);
2363 	mtree_test_insert(mt, 1, (void *)0x3);
2364 	mtree_test_store(mt, 1, (void *)0x3);
2365 	mtree_test_insert(mt, 5, (void *)0xb);
2366 	mtree_test_erase(mt, 1);
2367 	mtree_test_insert(mt, 1, (void *)0x3);
2368 	mtree_test_insert(mt, 4, (void *)0x9);
2369 	mtree_test_insert(mt, 1, (void *)0x3);
2370 	mtree_test_erase(mt, 1);
2371 	mtree_test_insert(mt, 2, (void *)0x5);
2372 	mtree_test_insert(mt, 1, (void *)0x3);
2373 	mtree_test_erase(mt, 3);
2374 	mtree_test_insert(mt, 22, (void *)0x2d);
2375 	mtree_test_insert(mt, 15, (void *)0x1f);
2376 	mtree_test_insert(mt, 2, (void *)0x5);
2377 	mtree_test_insert(mt, 1, (void *)0x3);
2378 	mtree_test_insert(mt, 8, (void *)0x11);
2379 	mtree_test_insert(mt, 12, (void *)0x19);
2380 	mtree_test_erase(mt, 1);
2381 	mtree_test_store_range(mt, 4, 62, (void *)0x9);
2382 	mtree_test_erase(mt, 62);
2383 	mtree_test_store_range(mt, 1, 0, (void *)0x3);
2384 	mtree_test_insert(mt, 11, (void *)0x17);
2385 	mtree_test_insert(mt, 3, (void *)0x7);
2386 	mtree_test_insert(mt, 3, (void *)0x7);
2387 	mtree_test_store(mt, 62, (void *)0x7d);
2388 	mtree_test_erase(mt, 62);
2389 	mtree_test_store_range(mt, 1, 15, (void *)0x3);
2390 	mtree_test_erase(mt, 1);
2391 	mtree_test_insert(mt, 22, (void *)0x2d);
2392 	mtree_test_insert(mt, 12, (void *)0x19);
2393 	mtree_test_erase(mt, 1);
2394 	mtree_test_insert(mt, 3, (void *)0x7);
2395 	mtree_test_store(mt, 62, (void *)0x7d);
2396 	mtree_test_erase(mt, 62);
2397 	mtree_test_insert(mt, 122, (void *)0xf5);
2398 	mtree_test_store(mt, 3, (void *)0x7);
2399 	mtree_test_insert(mt, 0, (void *)0x1);
2400 	mtree_test_store_range(mt, 0, 1, (void *)0x1);
2401 	mtree_test_insert(mt, 85, (void *)0xab);
2402 	mtree_test_insert(mt, 72, (void *)0x91);
2403 	mtree_test_insert(mt, 81, (void *)0xa3);
2404 	mtree_test_insert(mt, 726, (void *)0x5ad);
2405 	mtree_test_insert(mt, 0, (void *)0x1);
2406 	mtree_test_insert(mt, 1, (void *)0x3);
2407 	mtree_test_store(mt, 51, (void *)0x67);
2408 	mtree_test_insert(mt, 611, (void *)0x4c7);
2409 	mtree_test_insert(mt, 485, (void *)0x3cb);
2410 	mtree_test_insert(mt, 1, (void *)0x3);
2411 	mtree_test_erase(mt, 1);
2412 	mtree_test_insert(mt, 0, (void *)0x1);
2413 	mtree_test_insert(mt, 1, (void *)0x3);
2414 	mtree_test_insert_range(mt, 26, 1, (void *)0x35);
2415 	mtree_test_load(mt, 1);
2416 	mtree_test_store_range(mt, 1, 22, (void *)0x3);
2417 	mtree_test_insert(mt, 1, (void *)0x3);
2418 	mtree_test_erase(mt, 1);
2419 	mtree_test_load(mt, 53);
2420 	mtree_test_load(mt, 1);
2421 	mtree_test_store_range(mt, 1, 1, (void *)0x3);
2422 	mtree_test_insert(mt, 222, (void *)0x1bd);
2423 	mtree_test_insert(mt, 485, (void *)0x3cb);
2424 	mtree_test_insert(mt, 1, (void *)0x3);
2425 	mtree_test_erase(mt, 1);
2426 	mtree_test_load(mt, 0);
2427 	mtree_test_insert(mt, 21, (void *)0x2b);
2428 	mtree_test_insert(mt, 3, (void *)0x7);
2429 	mtree_test_store(mt, 621, (void *)0x4db);
2430 	mtree_test_insert(mt, 0, (void *)0x1);
2431 	mtree_test_erase(mt, 5);
2432 	mtree_test_insert(mt, 1, (void *)0x3);
2433 	mtree_test_store(mt, 62, (void *)0x7d);
2434 	mtree_test_erase(mt, 62);
2435 	mtree_test_store_range(mt, 1, 0, (void *)0x3);
2436 	mtree_test_insert(mt, 22, (void *)0x2d);
2437 	mtree_test_insert(mt, 12, (void *)0x19);
2438 	mtree_test_erase(mt, 1);
2439 	mtree_test_insert(mt, 1, (void *)0x3);
2440 	mtree_test_store_range(mt, 4, 62, (void *)0x9);
2441 	mtree_test_erase(mt, 62);
2442 	mtree_test_erase(mt, 1);
2443 	mtree_test_load(mt, 1);
2444 	mtree_test_store_range(mt, 1, 22, (void *)0x3);
2445 	mtree_test_insert(mt, 1, (void *)0x3);
2446 	mtree_test_erase(mt, 1);
2447 	mtree_test_load(mt, 53);
2448 	mtree_test_load(mt, 1);
2449 	mtree_test_store_range(mt, 1, 1, (void *)0x3);
2450 	mtree_test_insert(mt, 222, (void *)0x1bd);
2451 	mtree_test_insert(mt, 485, (void *)0x3cb);
2452 	mtree_test_insert(mt, 1, (void *)0x3);
2453 	mtree_test_erase(mt, 1);
2454 	mtree_test_insert(mt, 1, (void *)0x3);
2455 	mtree_test_load(mt, 0);
2456 	mtree_test_load(mt, 0);
2457 	mtree_destroy(mt);
2458 
2459 	/*
2460 	 * 7. The previous fix was incomplete; fix mas_reuse_node() clearing of
2461 	 * old data by overwriting it first - that way metadata is of no concern.
2462 	 */
2463 	mt_init_flags(mt, 0);
2464 	mtree_test_load(mt, 1);
2465 	mtree_test_insert(mt, 102, (void *)0xcd);
2466 	mtree_test_erase(mt, 2);
2467 	mtree_test_erase(mt, 0);
2468 	mtree_test_load(mt, 0);
2469 	mtree_test_insert(mt, 4, (void *)0x9);
2470 	mtree_test_insert(mt, 2, (void *)0x5);
2471 	mtree_test_insert(mt, 110, (void *)0xdd);
2472 	mtree_test_insert(mt, 1, (void *)0x3);
2473 	mtree_test_insert_range(mt, 5, 0, (void *)0xb);
2474 	mtree_test_erase(mt, 2);
2475 	mtree_test_store(mt, 0, (void *)0x1);
2476 	mtree_test_store(mt, 112, (void *)0xe1);
2477 	mtree_test_insert(mt, 21, (void *)0x2b);
2478 	mtree_test_store(mt, 1, (void *)0x3);
2479 	mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
2480 	mtree_test_store(mt, 2, (void *)0x5);
2481 	mtree_test_load(mt, 22);
2482 	mtree_test_erase(mt, 2);
2483 	mtree_test_store(mt, 210, (void *)0x1a5);
2484 	mtree_test_store_range(mt, 0, 2, (void *)0x1);
2485 	mtree_test_store(mt, 2, (void *)0x5);
2486 	mtree_test_erase(mt, 2);
2487 	mtree_test_erase(mt, 22);
2488 	mtree_test_erase(mt, 1);
2489 	mtree_test_erase(mt, 2);
2490 	mtree_test_store(mt, 0, (void *)0x1);
2491 	mtree_test_load(mt, 112);
2492 	mtree_test_insert(mt, 2, (void *)0x5);
2493 	mtree_test_erase(mt, 2);
2494 	mtree_test_store(mt, 1, (void *)0x3);
2495 	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2496 	mtree_test_erase(mt, 0);
2497 	mtree_test_erase(mt, 2);
2498 	mtree_test_store(mt, 2, (void *)0x5);
2499 	mtree_test_erase(mt, 0);
2500 	mtree_test_erase(mt, 2);
2501 	mtree_test_store(mt, 0, (void *)0x1);
2502 	mtree_test_store(mt, 0, (void *)0x1);
2503 	mtree_test_erase(mt, 2);
2504 	mtree_test_store(mt, 2, (void *)0x5);
2505 	mtree_test_erase(mt, 2);
2506 	mtree_test_insert(mt, 2, (void *)0x5);
2507 	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2508 	mtree_test_erase(mt, 0);
2509 	mtree_test_erase(mt, 2);
2510 	mtree_test_store(mt, 0, (void *)0x1);
2511 	mtree_test_load(mt, 112);
2512 	mtree_test_store_range(mt, 110, 12, (void *)0xdd);
2513 	mtree_test_store(mt, 2, (void *)0x5);
2514 	mtree_test_load(mt, 110);
2515 	mtree_test_insert_range(mt, 4, 71, (void *)0x9);
2516 	mtree_test_load(mt, 2);
2517 	mtree_test_store(mt, 2, (void *)0x5);
2518 	mtree_test_insert_range(mt, 11, 22, (void *)0x17);
2519 	mtree_test_erase(mt, 12);
2520 	mtree_test_store(mt, 2, (void *)0x5);
2521 	mtree_test_load(mt, 22);
2522 	mtree_destroy(mt);
2523 
2525 	/*
2526 	 * 8. When rebalancing or spanning_rebalance(), the max of the new node
2527 	 * may be set incorrectly to the final pivot and not the right max.
2528 	 * Fix by setting the left max to the original right max if the entire
2529 	 * node is consumed.
2530 	 */
2531 	mt_init_flags(mt, 0);
2532 	mtree_test_store(mt, 6, (void *)0xd);
2533 	mtree_test_store(mt, 67, (void *)0x87);
2534 	mtree_test_insert(mt, 15, (void *)0x1f);
2535 	mtree_test_insert(mt, 6716, (void *)0x3479);
2536 	mtree_test_store(mt, 61, (void *)0x7b);
2537 	mtree_test_insert(mt, 13, (void *)0x1b);
2538 	mtree_test_store(mt, 8, (void *)0x11);
2539 	mtree_test_insert(mt, 1, (void *)0x3);
2540 	mtree_test_load(mt, 0);
2541 	mtree_test_erase(mt, 67167);
2542 	mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
2543 	mtree_test_insert(mt, 6, (void *)0xd);
2544 	mtree_test_erase(mt, 67);
2545 	mtree_test_insert(mt, 1, (void *)0x3);
2546 	mtree_test_erase(mt, 667167);
2547 	mtree_test_insert(mt, 6, (void *)0xd);
2548 	mtree_test_store(mt, 67, (void *)0x87);
2549 	mtree_test_insert(mt, 5, (void *)0xb);
2550 	mtree_test_erase(mt, 1);
2551 	mtree_test_insert(mt, 6, (void *)0xd);
2552 	mtree_test_erase(mt, 67);
2553 	mtree_test_insert(mt, 15, (void *)0x1f);
2554 	mtree_test_insert(mt, 67167, (void *)0x20cbf);
2555 	mtree_test_insert(mt, 1, (void *)0x3);
2556 	mtree_test_load(mt, 7);
2557 	mtree_test_insert(mt, 16, (void *)0x21);
2558 	mtree_test_insert(mt, 36, (void *)0x49);
2559 	mtree_test_store(mt, 67, (void *)0x87);
2560 	mtree_test_store(mt, 6, (void *)0xd);
2561 	mtree_test_insert(mt, 367, (void *)0x2df);
2562 	mtree_test_insert(mt, 115, (void *)0xe7);
2563 	mtree_test_store(mt, 0, (void *)0x1);
2564 	mtree_test_store_range(mt, 1, 3, (void *)0x3);
2565 	mtree_test_store(mt, 1, (void *)0x3);
2566 	mtree_test_erase(mt, 67167);
2567 	mtree_test_insert_range(mt, 6, 47, (void *)0xd);
2568 	mtree_test_store(mt, 1, (void *)0x3);
2569 	mtree_test_insert_range(mt, 1, 67, (void *)0x3);
2570 	mtree_test_load(mt, 67);
2571 	mtree_test_insert(mt, 1, (void *)0x3);
2572 	mtree_test_erase(mt, 67167);
2573 	mtree_destroy(mt);
2574 
2575 	/*
2576 	 * 9. A spanning store to the end of data caused an invalid metadata
2577 	 * length, which eventually resulted in a crash.
2578 	 * Fix by checking if there is a value in pivot before incrementing the
2579 	 * metadata end in mab_mas_cp().  To ensure this doesn't happen again,
2580 	 * abstract the two locations this happens into a function called
2581 	 * mas_leaf_set_meta().
2582 	 */
2583 	mt_init_flags(mt, 0);
2584 	mtree_test_insert(mt, 21, (void *)0x2b);
2585 	mtree_test_insert(mt, 12, (void *)0x19);
2586 	mtree_test_insert(mt, 6, (void *)0xd);
2587 	mtree_test_insert(mt, 8, (void *)0x11);
2588 	mtree_test_insert(mt, 2, (void *)0x5);
2589 	mtree_test_insert(mt, 91, (void *)0xb7);
2590 	mtree_test_insert(mt, 18, (void *)0x25);
2591 	mtree_test_insert(mt, 81, (void *)0xa3);
2592 	mtree_test_store_range(mt, 0, 128, (void *)0x1);
2593 	mtree_test_store(mt, 1, (void *)0x3);
2594 	mtree_test_erase(mt, 8);
2595 	mtree_test_insert(mt, 11, (void *)0x17);
2596 	mtree_test_insert(mt, 8, (void *)0x11);
2597 	mtree_test_insert(mt, 21, (void *)0x2b);
2598 	mtree_test_insert(mt, 2, (void *)0x5);
2599 	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2600 	mtree_test_erase(mt, ULONG_MAX - 10);
2601 	mtree_test_store_range(mt, 0, 281, (void *)0x1);
2602 	mtree_test_erase(mt, 2);
2603 	mtree_test_insert(mt, 1211, (void *)0x977);
2604 	mtree_test_insert(mt, 111, (void *)0xdf);
2605 	mtree_test_insert(mt, 13, (void *)0x1b);
2606 	mtree_test_insert(mt, 211, (void *)0x1a7);
2607 	mtree_test_insert(mt, 11, (void *)0x17);
2608 	mtree_test_insert(mt, 5, (void *)0xb);
2609 	mtree_test_insert(mt, 1218, (void *)0x985);
2610 	mtree_test_insert(mt, 61, (void *)0x7b);
2611 	mtree_test_store(mt, 1, (void *)0x3);
2612 	mtree_test_insert(mt, 121, (void *)0xf3);
2613 	mtree_test_insert(mt, 8, (void *)0x11);
2614 	mtree_test_insert(mt, 21, (void *)0x2b);
2615 	mtree_test_insert(mt, 2, (void *)0x5);
2616 	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2617 	mtree_test_erase(mt, ULONG_MAX - 10);
2618 }
2619 
2620 /* duplicate the tree with a specific gap */
2621 static noinline void __init check_dup_gaps(struct maple_tree *mt,
2622 				    unsigned long nr_entries, bool zero_start,
2623 				    unsigned long gap)
2624 {
2625 	unsigned long i = 0;
2626 	struct maple_tree newmt;
2627 	int ret;
2628 	void *tmp;
2629 	MA_STATE(mas, mt, 0, 0);
2630 	MA_STATE(newmas, &newmt, 0, 0);
2631 	struct rw_semaphore newmt_lock;
2632 
2633 	init_rwsem(&newmt_lock);
2634 	mt_set_external_lock(&newmt, &newmt_lock);
2635 
2636 	if (!zero_start)
2637 		i = 1;
2638 
2639 	mt_zero_nr_tallocated();
2640 	for (; i <= nr_entries; i++)
2641 		mtree_store_range(mt, i*10, (i+1)*10 - gap,
2642 				  xa_mk_value(i), GFP_KERNEL);
2643 
2644 	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2645 	mt_set_non_kernel(99999);
2646 	down_write(&newmt_lock);
2647 	ret = mas_expected_entries(&newmas, nr_entries);
2648 	mt_set_non_kernel(0);
2649 	MT_BUG_ON(mt, ret != 0);
2650 
2651 	rcu_read_lock();
2652 	mas_for_each(&mas, tmp, ULONG_MAX) {
2653 		newmas.index = mas.index;
2654 		newmas.last = mas.last;
2655 		mas_store(&newmas, tmp);
2656 	}
2657 	rcu_read_unlock();
2658 	mas_destroy(&newmas);
2659 
2660 	__mt_destroy(&newmt);
2661 	up_write(&newmt_lock);
2662 }
2663 
2664 /* Duplicate many sizes of trees.  Mainly to test expected entry values */
2665 static noinline void __init check_dup(struct maple_tree *mt)
2666 {
2667 	int i;
2668 	int big_start = 100010;
2669 
2670 	/* Check with a value at zero */
2671 	for (i = 10; i < 1000; i++) {
2672 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2673 		check_dup_gaps(mt, i, true, 5);
2674 		mtree_destroy(mt);
2675 		rcu_barrier();
2676 	}
2677 
2678 	cond_resched();
2679 	mt_cache_shrink();
2680 	/* Check with a value at zero, no gap */
2681 	for (i = 1000; i < 2000; i++) {
2682 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2683 		check_dup_gaps(mt, i, true, 0);
2684 		mtree_destroy(mt);
2685 		rcu_barrier();
2686 	}
2687 
2688 	cond_resched();
2689 	mt_cache_shrink();
2690 	/* Check with a value at zero and unreasonably large */
2691 	for (i = big_start; i < big_start + 10; i++) {
2692 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2693 		check_dup_gaps(mt, i, true, 5);
2694 		mtree_destroy(mt);
2695 		rcu_barrier();
2696 	}
2697 
2698 	cond_resched();
2699 	mt_cache_shrink();
2700 	/* Small to medium size not starting at zero */
2701 	for (i = 200; i < 1000; i++) {
2702 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2703 		check_dup_gaps(mt, i, false, 5);
2704 		mtree_destroy(mt);
2705 		rcu_barrier();
2706 	}
2707 
2708 	cond_resched();
2709 	mt_cache_shrink();
2710 	/* Unreasonably large not starting at zero */
2711 	for (i = big_start; i < big_start + 10; i++) {
2712 		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2713 		check_dup_gaps(mt, i, false, 5);
2714 		mtree_destroy(mt);
2715 		rcu_barrier();
2716 		cond_resched();
2717 		mt_cache_shrink();
2718 	}
2719 
2720 	/* Check non-allocation tree not starting at zero */
2721 	for (i = 1500; i < 3000; i++) {
2722 		mt_init_flags(mt, 0);
2723 		check_dup_gaps(mt, i, false, 5);
2724 		mtree_destroy(mt);
2725 		rcu_barrier();
2726 		cond_resched();
2727 		if (i % 2 == 0)
2728 			mt_cache_shrink();
2729 	}
2730 
2731 	mt_cache_shrink();
2732 	/* Check non-allocation tree starting at zero */
2733 	for (i = 200; i < 1000; i++) {
2734 		mt_init_flags(mt, 0);
2735 		check_dup_gaps(mt, i, true, 5);
2736 		mtree_destroy(mt);
2737 		rcu_barrier();
2738 		cond_resched();
2739 	}
2740 
2741 	mt_cache_shrink();
2742 	/* Unreasonably large */
2743 	for (i = big_start + 5; i < big_start + 10; i++) {
2744 		mt_init_flags(mt, 0);
2745 		check_dup_gaps(mt, i, true, 5);
2746 		mtree_destroy(mt);
2747 		rcu_barrier();
2748 		mt_cache_shrink();
2749 		cond_resched();
2750 	}
2751 }
2752 
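/*
 * check_bnode_min_spanning() - Fill indices 0-509, then overwrite the
 * upper half with one spanning NULL so the rebalance is left with a
 * minimal amount of data in the big node.
 */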
2753 static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
2754 {
2755 	int i = 50;
2756 	MA_STATE(mas, mt, 0, 0);
2757 
2758 	mt_set_non_kernel(9999);
2759 	mas_lock(&mas);
2760 	do {
2761 		mas_set_range(&mas, i*10, i*10+9);
2762 		mas_store(&mas, check_bnode_min_spanning);
2763 	} while (i--);
2764 
2765 	mas_set_range(&mas, 240, 509);
2766 	mas_store(&mas, NULL);
2767 	mas_unlock(&mas);
2768 	mas_destroy(&mas);
2769 	mt_set_non_kernel(0);
2770 }
2771 
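/*
 * check_empty_area_window() - Check mas_empty_area() and
 * mas_empty_area_rev() against gaps lying inside, outside, and across the
 * requested [min, max] window.
 */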
2772 static noinline void __init check_empty_area_window(struct maple_tree *mt)
2773 {
2774 	unsigned long i, nr_entries = 20;
2775 	MA_STATE(mas, mt, 0, 0);
2776 
2777 	for (i = 1; i <= nr_entries; i++)
2778 		mtree_store_range(mt, i*10, i*10 + 9,
2779 				  xa_mk_value(i), GFP_KERNEL);
2780 
2781 	/* Create another hole besides the one at 0 */
2782 	mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);
2783 
2784 	/* Check lower bounds that don't fit */
2785 	rcu_read_lock();
2786 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);
2787 
2788 	mas_reset(&mas);
2789 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);
2790 
2791 	/* Check lower bound that does fit */
2792 	mas_reset(&mas);
2793 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
2794 	MT_BUG_ON(mt, mas.index != 5);
2795 	MT_BUG_ON(mt, mas.last != 9);
2796 	rcu_read_unlock();
2797 
2798 	/* Check one gap that doesn't fit and one that does */
2799 	rcu_read_lock();
2800 	mas_reset(&mas);
2801 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
2802 	MT_BUG_ON(mt, mas.index != 161);
2803 	MT_BUG_ON(mt, mas.last != 169);
2804 
2805 	/* Check one gap that does fit above the min */
2806 	mas_reset(&mas);
2807 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
2808 	MT_BUG_ON(mt, mas.index != 216);
2809 	MT_BUG_ON(mt, mas.last != 218);
2810 
2811 	/* Check size that doesn't fit any gap */
2812 	mas_reset(&mas);
2813 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);
2814 
2815 	/*
2816 	 * Check size that doesn't fit the lower end of the window but
2817 	 * does fit the gap
2818 	 */
2819 	mas_reset(&mas);
2820 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);
2821 
2822 	/*
2823 	 * Check size that doesn't fit the upper end of the window but
2824 	 * does fit the gap
2825 	 */
2826 	mas_reset(&mas);
2827 	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);
2828 
2829 	/* Check mas_empty_area forward */
2830 	mas_reset(&mas);
2831 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
2832 	MT_BUG_ON(mt, mas.index != 0);
2833 	MT_BUG_ON(mt, mas.last != 8);
2834 
2835 	mas_reset(&mas);
2836 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
2837 	MT_BUG_ON(mt, mas.index != 0);
2838 	MT_BUG_ON(mt, mas.last != 3);
2839 
2840 	mas_reset(&mas);
2841 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);
2842 
2843 	mas_reset(&mas);
2844 	MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);
2845 
2846 	mas_reset(&mas);
2847 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);
2848 
2849 	mas_reset(&mas);
2850 	mas_empty_area(&mas, 100, 165, 3);
2851 
2852 	mas_reset(&mas);
2853 	MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
2854 	rcu_read_unlock();
2855 }
2856 
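/*
 * check_empty_area_fill() - Repeatedly claim the lowest gap of each size
 * until the window is exhausted, then probe deep-node corner cases.
 */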
2857 static noinline void __init check_empty_area_fill(struct maple_tree *mt)
2858 {
2859 	const unsigned long max = 0x25D78000;
2860 	unsigned long size;
2861 	int loop, shift;
2862 	MA_STATE(mas, mt, 0, 0);
2863 
2864 	mt_set_non_kernel(99999);
2865 	for (shift = 12; shift <= 16; shift++) {
2866 		loop = 5000;
2867 		size = 1 << shift;
2868 		while (loop--) {
2869 			mas_set(&mas, 0);
2870 			mas_lock(&mas);
2871 			MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
2872 			MT_BUG_ON(mt, mas.last != mas.index + size - 1);
2873 			mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
2874 			mas_unlock(&mas);
2875 			mas_reset(&mas);
2876 		}
2877 	}
2878 
2879 	/* No space left. */
2880 	size = 0x1000;
2881 	rcu_read_lock();
2882 	MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
2883 	rcu_read_unlock();
2884 
2885 	/* Fill a depth 3 node to the maximum */
2886 	for (unsigned long i = 629440511; i <= 629440800; i += 6)
2887 		mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
2888 	/* Make space in the second-last depth 4 node */
2889 	mtree_erase(mt, 631668735);
2890 	/* Make space in the last depth 4 node */
2891 	mtree_erase(mt, 629506047);
2892 	mas_reset(&mas);
2893 	/* Search from just after the gap in the second-last depth 4 node */
2894 	rcu_read_lock();
2895 	MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
2896 	rcu_read_unlock();
2897 	mt_set_non_kernel(0);
2898 }
2899 
2900 /*
2901  * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
2902  *
2903  * The table below shows the single entry tree (0-0 pointer) and normal tree
2904  * with nodes.
2905  *
2906  * Function	ENTRY	Start		Result		index & last
2907  *     ┬          ┬       ┬               ┬                ┬
2908  *     │          │       │               │                └─ the final range
2909  *     │          │       │               └─ The node value after execution
2910  *     │          │       └─ The node value before execution
2911  *     │          └─ If the entry exists or does not exist (DNE)
2912  *     └─ The function name
2913  *
2914  * Function	ENTRY	Start		Result		index & last
2915  * mas_next()
2916  *  - after last
2917  *			Single entry tree at 0-0
2918  *			------------------------
2919  *		DNE	MAS_START	MAS_NONE	1 - oo
2920  *		DNE	MAS_PAUSE	MAS_NONE	1 - oo
2921  *		DNE	MAS_ROOT	MAS_NONE	1 - oo
2922  *			when index = 0
2923  *		DNE	MAS_NONE	MAS_ROOT	0
2924  *			when index > 0
2925  *		DNE	MAS_NONE	MAS_NONE	1 - oo
2926  *
2927  *			Normal tree
2928  *			-----------
2929  *		exists	MAS_START	active		range
2930  *		DNE	MAS_START	active		set to last range
2931  *		exists	MAS_PAUSE	active		range
2932  *		DNE	MAS_PAUSE	active		set to last range
2933  *		exists	MAS_NONE	active		range
2934  *		exists	active		active		range
2935  *		DNE	active		active		set to last range
2936  *		ERANGE	active		MAS_OVERFLOW	last range
2937  *
2938  * Function	ENTRY	Start		Result		index & last
2939  * mas_prev()
2940  * - before index
2941  *			Single entry tree at 0-0
2942  *			------------------------
2943  *				if index > 0
2944  *		exists	MAS_START	MAS_ROOT	0
2945  *		exists	MAS_PAUSE	MAS_ROOT	0
2946  *		exists	MAS_NONE	MAS_ROOT	0
2947  *
2948  *				if index == 0
2949  *		DNE	MAS_START	MAS_NONE	0
2950  *		DNE	MAS_PAUSE	MAS_NONE	0
2951  *		DNE	MAS_NONE	MAS_NONE	0
2952  *		DNE	MAS_ROOT	MAS_NONE	0
2953  *
2954  *			Normal tree
2955  *			-----------
2956  *		exists	MAS_START	active		range
2957  *		DNE	MAS_START	active		set to min
2958  *		exists	MAS_PAUSE	active		range
2959  *		DNE	MAS_PAUSE	active		set to min
2960  *		exists	MAS_NONE	active		range
2961  *		DNE	MAS_NONE	MAS_NONE	set to min
2962  *		any	MAS_ROOT	MAS_NONE	0
2963  *		exists	active		active		range
2964  *		DNE	active		active		last range
2965  *		ERANGE	active		MAS_UNDERFLOW	last range
2966  *
2967  * Function	ENTRY	Start		Result		index & last
2968  * mas_find()
2969  *  - at index or next
2970  *			Single entry tree at 0-0
2971  *			------------------------
2972  *				if index >  0
2973  *		DNE	MAS_START	MAS_NONE	0
2974  *		DNE	MAS_PAUSE	MAS_NONE	0
2975  *		DNE	MAS_ROOT	MAS_NONE	0
2976  *		DNE	MAS_NONE	MAS_NONE	1
2977  *				if index ==  0
2978  *		exists	MAS_START	MAS_ROOT	0
2979  *		exists	MAS_PAUSE	MAS_ROOT	0
2980  *		exists	MAS_NONE	MAS_ROOT	0
2981  *
2982  *			Normal tree
2983  *			-----------
2984  *		exists	MAS_START	active		range
2985  *		DNE	MAS_START	active		set to max
2986  *		exists	MAS_PAUSE	active		range
2987  *		DNE	MAS_PAUSE	active		set to max
2988  *		exists	MAS_NONE	active		range (start at last)
2989  *		exists	active		active		range
2990  *		DNE	active		active		last range (max < last)
2991  *
2992  * Function	ENTRY	Start		Result		index & last
2993  * mas_find_rev()
2994  *  - at index or before
2995  *			Single entry tree at 0-0
2996  *			------------------------
2997  *				if index >  0
2998  *		exists	MAS_START	MAS_ROOT	0
2999  *		exists	MAS_PAUSE	MAS_ROOT	0
3000  *		exists	MAS_NONE	MAS_ROOT	0
3001  *				if index ==  0
3002  *		DNE	MAS_START	MAS_NONE	0
3003  *		DNE	MAS_PAUSE	MAS_NONE	0
3004  *		DNE	MAS_NONE	MAS_NONE	0
3005  *		DNE	MAS_ROOT	MAS_NONE	0
3006  *
3007  *			Normal tree
3008  *			-----------
3009  *		exists	MAS_START	active		range
3010  *		DNE	MAS_START	active		set to min
3011  *		exists	MAS_PAUSE	active		range
3012  *		DNE	MAS_PAUSE	active		set to min
3013  *		exists	MAS_NONE	active		range (start at index)
3014  *		exists	active		active		range
3015  *		DNE	active		active		last range (min > index)
3016  *
3017  * Function	ENTRY	Start		Result		index & last
3018  * mas_walk()
3019  * - Look up index
3020  *			Single entry tree at 0-0
3021  *			------------------------
3022  *				if index >  0
3023  *		DNE	MAS_START	MAS_ROOT	1 - oo
3024  *		DNE	MAS_PAUSE	MAS_ROOT	1 - oo
3025  *		DNE	MAS_NONE	MAS_ROOT	1 - oo
3026  *		DNE	MAS_ROOT	MAS_ROOT	1 - oo
3027  *				if index ==  0
3028  *		exists	MAS_START	MAS_ROOT	0
3029  *		exists	MAS_PAUSE	MAS_ROOT	0
3030  *		exists	MAS_NONE	MAS_ROOT	0
3031  *		exists	MAS_ROOT	MAS_ROOT	0
3032  *
3033  *			Normal tree
3034  *			-----------
3035  *		exists	MAS_START	active		range
3036  *		DNE	MAS_START	active		range of NULL
3037  *		exists	MAS_PAUSE	active		range
3038  *		DNE	MAS_PAUSE	active		range of NULL
3039  *		exists	MAS_NONE	active		range
3040  *		DNE	MAS_NONE	active		range of NULL
3041  *		exists	active		active		range
3042  *		DNE	active		active		range of NULL
3043  */
3044 
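/*
 * A minimal sketch of how these states are normally driven: iterate, call
 * mas_pause() before dropping the lock, then resume.  Illustrative only;
 * this helper is not called by the test suite.
 */
static void __init __maybe_unused mas_state_example(struct maple_tree *mt)
{
	void *entry;
	MA_STATE(mas, mt, 0, 0);

	mas_lock(&mas);
	/* MAS_START: the first call walks down and leaves the state active. */
	entry = mas_next(&mas, ULONG_MAX);
	/* Pause so no node pointer is kept across the unlock. */
	mas_pause(&mas);
	mas_unlock(&mas);

	cond_resched();

	mas_lock(&mas);
	/* MAS_PAUSE: re-walk from the saved index and continue as active. */
	entry = mas_next(&mas, ULONG_MAX);
	mas_unlock(&mas);
	(void)entry;
}
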
3045 #define mas_active(x)		(((x).node != MAS_ROOT) && \
3046 				 ((x).node != MAS_START) && \
3047 				 ((x).node != MAS_PAUSE) && \
3048 				 ((x).node != MAS_NONE))
3049 static noinline void __init check_state_handling(struct maple_tree *mt)
3050 {
3051 	MA_STATE(mas, mt, 0, 0);
3052 	void *entry, *ptr = (void *) 0x1234500;
3053 	void *ptr2 = &ptr;
3054 	void *ptr3 = &ptr2;
3055 
3056 	/* Check MAS_ROOT First */
3057 	mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
3058 
3059 	mas_lock(&mas);
3060 	/* prev: start -> underflow */
3061 	entry = mas_prev(&mas, 0);
3062 	MT_BUG_ON(mt, entry != NULL);
3063 	MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3064 
3065 	/* prev: Start -> root */
3066 	mas_set(&mas, 10);
3067 	entry = mas_prev(&mas, 0);
3068 	MT_BUG_ON(mt, entry != ptr);
3069 	MT_BUG_ON(mt, mas.index != 0);
3070 	MT_BUG_ON(mt, mas.last != 0);
3071 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3072 
3073 	/* prev: pause -> root */
3074 	mas_set(&mas, 10);
3075 	mas_pause(&mas);
3076 	entry = mas_prev(&mas, 0);
3077 	MT_BUG_ON(mt, entry != ptr);
3078 	MT_BUG_ON(mt, mas.index != 0);
3079 	MT_BUG_ON(mt, mas.last != 0);
3080 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3081 
3082 	/* next: start -> none */
3083 	mas_set(&mas, 0);
3084 	entry = mas_next(&mas, ULONG_MAX);
3085 	MT_BUG_ON(mt, mas.index != 1);
3086 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3087 	MT_BUG_ON(mt, entry != NULL);
3088 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3089 
3090 	/* next: start -> none */
3091 	mas_set(&mas, 10);
3092 	entry = mas_next(&mas, ULONG_MAX);
3093 	MT_BUG_ON(mt, mas.index != 1);
3094 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3095 	MT_BUG_ON(mt, entry != NULL);
3096 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3097 
3098 	/* find: start -> root */
3099 	mas_set(&mas, 0);
3100 	entry = mas_find(&mas, ULONG_MAX);
3101 	MT_BUG_ON(mt, entry != ptr);
3102 	MT_BUG_ON(mt, mas.index != 0);
3103 	MT_BUG_ON(mt, mas.last != 0);
3104 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3105 
3106 	/* find: root -> none */
3107 	entry = mas_find(&mas, ULONG_MAX);
3108 	MT_BUG_ON(mt, entry != NULL);
3109 	MT_BUG_ON(mt, mas.index != 1);
3110 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3111 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3112 
3113 	/* find: none -> none */
3114 	entry = mas_find(&mas, ULONG_MAX);
3115 	MT_BUG_ON(mt, entry != NULL);
3116 	MT_BUG_ON(mt, mas.index != 1);
3117 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3118 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3119 
3120 	/* find: start -> none */
3121 	mas_set(&mas, 10);
3122 	entry = mas_find(&mas, ULONG_MAX);
3123 	MT_BUG_ON(mt, entry != NULL);
3124 	MT_BUG_ON(mt, mas.index != 1);
3125 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3126 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3127 
3128 	/* find_rev: none -> root */
3129 	entry = mas_find_rev(&mas, 0);
3130 	MT_BUG_ON(mt, entry != ptr);
3131 	MT_BUG_ON(mt, mas.index != 0);
3132 	MT_BUG_ON(mt, mas.last != 0);
3133 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3134 
3135 	/* find_rev: start -> root */
3136 	mas_set(&mas, 0);
3137 	entry = mas_find_rev(&mas, 0);
3138 	MT_BUG_ON(mt, entry != ptr);
3139 	MT_BUG_ON(mt, mas.index != 0);
3140 	MT_BUG_ON(mt, mas.last != 0);
3141 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3142 
3143 	/* find_rev: root -> none */
3144 	entry = mas_find_rev(&mas, 0);
3145 	MT_BUG_ON(mt, entry != NULL);
3146 	MT_BUG_ON(mt, mas.index != 0);
3147 	MT_BUG_ON(mt, mas.last != 0);
3148 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3149 
3150 	/* find_rev: none -> none */
3151 	entry = mas_find_rev(&mas, 0);
3152 	MT_BUG_ON(mt, entry != NULL);
3153 	MT_BUG_ON(mt, mas.index != 0);
3154 	MT_BUG_ON(mt, mas.last != 0);
3155 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3156 
3157 	/* find_rev: start -> root */
3158 	mas_set(&mas, 10);
3159 	entry = mas_find_rev(&mas, 0);
3160 	MT_BUG_ON(mt, entry != ptr);
3161 	MT_BUG_ON(mt, mas.index != 0);
3162 	MT_BUG_ON(mt, mas.last != 0);
3163 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3164 
3165 	/* walk: start -> none */
3166 	mas_set(&mas, 10);
3167 	entry = mas_walk(&mas);
3168 	MT_BUG_ON(mt, entry != NULL);
3169 	MT_BUG_ON(mt, mas.index != 1);
3170 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3171 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3172 
3173 	/* walk: pause -> none */
3174 	mas_set(&mas, 10);
3175 	mas_pause(&mas);
3176 	entry = mas_walk(&mas);
3177 	MT_BUG_ON(mt, entry != NULL);
3178 	MT_BUG_ON(mt, mas.index != 1);
3179 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3180 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3181 
3182 	/* walk: none -> none */
3183 	mas.index = mas.last = 10;
3184 	entry = mas_walk(&mas);
3185 	MT_BUG_ON(mt, entry != NULL);
3186 	MT_BUG_ON(mt, mas.index != 1);
3187 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3188 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3189 
3190 	/* walk: none -> none */
3191 	entry = mas_walk(&mas);
3192 	MT_BUG_ON(mt, entry != NULL);
3193 	MT_BUG_ON(mt, mas.index != 1);
3194 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3195 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3196 
3197 	/* walk: start -> root */
3198 	mas_set(&mas, 0);
3199 	entry = mas_walk(&mas);
3200 	MT_BUG_ON(mt, entry != ptr);
3201 	MT_BUG_ON(mt, mas.index != 0);
3202 	MT_BUG_ON(mt, mas.last != 0);
3203 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3204 
3205 	/* walk: pause -> root */
3206 	mas_set(&mas, 0);
3207 	mas_pause(&mas);
3208 	entry = mas_walk(&mas);
3209 	MT_BUG_ON(mt, entry != ptr);
3210 	MT_BUG_ON(mt, mas.index != 0);
3211 	MT_BUG_ON(mt, mas.last != 0);
3212 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3213 
3214 	/* walk: none -> root */
3215 	mas.node = MAS_NONE;
3216 	entry = mas_walk(&mas);
3217 	MT_BUG_ON(mt, entry != ptr);
3218 	MT_BUG_ON(mt, mas.index != 0);
3219 	MT_BUG_ON(mt, mas.last != 0);
3220 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3221 
3222 	/* walk: root -> root */
3223 	entry = mas_walk(&mas);
3224 	MT_BUG_ON(mt, entry != ptr);
3225 	MT_BUG_ON(mt, mas.index != 0);
3226 	MT_BUG_ON(mt, mas.last != 0);
3227 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3228 
3229 	/* walk: root -> none */
3230 	mas_set(&mas, 10);
3231 	entry = mas_walk(&mas);
3232 	MT_BUG_ON(mt, entry != NULL);
3233 	MT_BUG_ON(mt, mas.index != 1);
3234 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3235 	MT_BUG_ON(mt, mas.node != MAS_NONE);
3236 
3237 	/* walk: none -> root */
3238 	mas.index = mas.last = 0;
3239 	entry = mas_walk(&mas);
3240 	MT_BUG_ON(mt, entry != ptr);
3241 	MT_BUG_ON(mt, mas.index != 0);
3242 	MT_BUG_ON(mt, mas.last != 0);
3243 	MT_BUG_ON(mt, mas.node != MAS_ROOT);
3244 
3245 	mas_unlock(&mas);
3246 
3247 	/* Check when there is an actual node */
3248 	mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3249 	mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3250 	mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3251 	mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3252 
3253 	mas_lock(&mas);
3254 
3255 	/* next: start ->active */
3256 	mas_set(&mas, 0);
3257 	entry = mas_next(&mas, ULONG_MAX);
3258 	MT_BUG_ON(mt, entry != ptr);
3259 	MT_BUG_ON(mt, mas.index != 0x1000);
3260 	MT_BUG_ON(mt, mas.last != 0x1500);
3261 	MT_BUG_ON(mt, !mas_active(mas));
3262 
3263 	/* next: pause ->active */
3264 	mas_set(&mas, 0);
3265 	mas_pause(&mas);
3266 	entry = mas_next(&mas, ULONG_MAX);
3267 	MT_BUG_ON(mt, entry != ptr);
3268 	MT_BUG_ON(mt, mas.index != 0x1000);
3269 	MT_BUG_ON(mt, mas.last != 0x1500);
3270 	MT_BUG_ON(mt, !mas_active(mas));
3271 
3272 	/* next: none ->active */
3273 	mas.index = mas.last = 0;
3274 	mas.offset = 0;
3275 	mas.node = MAS_NONE;
3276 	entry = mas_next(&mas, ULONG_MAX);
3277 	MT_BUG_ON(mt, entry != ptr);
3278 	MT_BUG_ON(mt, mas.index != 0x1000);
3279 	MT_BUG_ON(mt, mas.last != 0x1500);
3280 	MT_BUG_ON(mt, !mas_active(mas));
3281 
3282 	/* next:active ->active */
3283 	entry = mas_next(&mas, ULONG_MAX);
3284 	MT_BUG_ON(mt, entry != ptr2);
3285 	MT_BUG_ON(mt, mas.index != 0x2000);
3286 	MT_BUG_ON(mt, mas.last != 0x2500);
3287 	MT_BUG_ON(mt, !mas_active(mas));
3288 
3289 	/* next:active -> active beyond data */
3290 	entry = mas_next(&mas, 0x2999);
3291 	MT_BUG_ON(mt, entry != NULL);
3292 	MT_BUG_ON(mt, mas.index != 0x2501);
3293 	MT_BUG_ON(mt, mas.last != 0x2fff);
3294 	MT_BUG_ON(mt, !mas_active(mas));
3295 
3296 	/* Continue after last range ends after max */
3297 	entry = mas_next(&mas, ULONG_MAX);
3298 	MT_BUG_ON(mt, entry != ptr3);
3299 	MT_BUG_ON(mt, mas.index != 0x3000);
3300 	MT_BUG_ON(mt, mas.last != 0x3500);
3301 	MT_BUG_ON(mt, !mas_active(mas));
3302 
3303 	/* next:active -> active continued */
3304 	entry = mas_next(&mas, ULONG_MAX);
3305 	MT_BUG_ON(mt, entry != NULL);
3306 	MT_BUG_ON(mt, mas.index != 0x3501);
3307 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3308 	MT_BUG_ON(mt, !mas_active(mas));
3309 
3310 	/* next:active -> overflow  */
3311 	entry = mas_next(&mas, ULONG_MAX);
3312 	MT_BUG_ON(mt, entry != NULL);
3313 	MT_BUG_ON(mt, mas.index != 0x3501);
3314 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3315 	MT_BUG_ON(mt, mas.node != MAS_OVERFLOW);
3316 
3317 	/* next:overflow -> overflow  */
3318 	entry = mas_next(&mas, ULONG_MAX);
3319 	MT_BUG_ON(mt, entry != NULL);
3320 	MT_BUG_ON(mt, mas.index != 0x3501);
3321 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3322 	MT_BUG_ON(mt, mas.node != MAS_OVERFLOW);
3323 
3324 	/* prev:overflow -> active  */
3325 	entry = mas_prev(&mas, 0);
3326 	MT_BUG_ON(mt, entry != ptr3);
3327 	MT_BUG_ON(mt, mas.index != 0x3000);
3328 	MT_BUG_ON(mt, mas.last != 0x3500);
3329 	MT_BUG_ON(mt, !mas_active(mas));
3330 
3331 	/* next: none -> active, skip value at location */
3332 	mas_set(&mas, 0);
3333 	entry = mas_next(&mas, ULONG_MAX);
3334 	mas.node = MAS_NONE;
3335 	mas.offset = 0;
3336 	entry = mas_next(&mas, ULONG_MAX);
3337 	MT_BUG_ON(mt, entry != ptr2);
3338 	MT_BUG_ON(mt, mas.index != 0x2000);
3339 	MT_BUG_ON(mt, mas.last != 0x2500);
3340 	MT_BUG_ON(mt, !mas_active(mas));
3341 
3342 	/* prev:active ->active */
3343 	entry = mas_prev(&mas, 0);
3344 	MT_BUG_ON(mt, entry != ptr);
3345 	MT_BUG_ON(mt, mas.index != 0x1000);
3346 	MT_BUG_ON(mt, mas.last != 0x1500);
3347 	MT_BUG_ON(mt, !mas_active(mas));
3348 
3349 	/* prev:active -> active spanning end range */
3350 	entry = mas_prev(&mas, 0x0100);
3351 	MT_BUG_ON(mt, entry != NULL);
3352 	MT_BUG_ON(mt, mas.index != 0);
3353 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3354 	MT_BUG_ON(mt, !mas_active(mas));
3355 
3356 	/* prev:active -> underflow */
3357 	entry = mas_prev(&mas, 0);
3358 	MT_BUG_ON(mt, entry != NULL);
3359 	MT_BUG_ON(mt, mas.index != 0);
3360 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3361 	MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3362 
3363 	/* prev:underflow -> underflow */
3364 	entry = mas_prev(&mas, 0);
3365 	MT_BUG_ON(mt, entry != NULL);
3366 	MT_BUG_ON(mt, mas.index != 0);
3367 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3368 	MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3369 
3370 	/* next:underflow -> active */
3371 	entry = mas_next(&mas, ULONG_MAX);
3372 	MT_BUG_ON(mt, entry != ptr);
3373 	MT_BUG_ON(mt, mas.index != 0x1000);
3374 	MT_BUG_ON(mt, mas.last != 0x1500);
3375 	MT_BUG_ON(mt, !mas_active(mas));
3376 
3377 	/* prev:first value -> underflow */
3378 	entry = mas_prev(&mas, 0x1000);
3379 	MT_BUG_ON(mt, entry != NULL);
3380 	MT_BUG_ON(mt, mas.index != 0x1000);
3381 	MT_BUG_ON(mt, mas.last != 0x1500);
3382 	MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3383 
3384 	/* find:underflow -> first value */
3385 	entry = mas_find(&mas, ULONG_MAX);
3386 	MT_BUG_ON(mt, entry != ptr);
3387 	MT_BUG_ON(mt, mas.index != 0x1000);
3388 	MT_BUG_ON(mt, mas.last != 0x1500);
3389 	MT_BUG_ON(mt, !mas_active(mas));
3390 
3391 	/* prev: pause ->active */
3392 	mas_set(&mas, 0x3600);
3393 	entry = mas_prev(&mas, 0);
3394 	MT_BUG_ON(mt, entry != ptr3);
3395 	mas_pause(&mas);
3396 	entry = mas_prev(&mas, 0);
3397 	MT_BUG_ON(mt, entry != ptr2);
3398 	MT_BUG_ON(mt, mas.index != 0x2000);
3399 	MT_BUG_ON(mt, mas.last != 0x2500);
3400 	MT_BUG_ON(mt, !mas_active(mas));
3401 
3402 	/* prev:active -> active spanning min */
3403 	entry = mas_prev(&mas, 0x1600);
3404 	MT_BUG_ON(mt, entry != NULL);
3405 	MT_BUG_ON(mt, mas.index != 0x1501);
3406 	MT_BUG_ON(mt, mas.last != 0x1FFF);
3407 	MT_BUG_ON(mt, !mas_active(mas));
3408 
3409 	/* prev: active ->active, continue */
3410 	entry = mas_prev(&mas, 0);
3411 	MT_BUG_ON(mt, entry != ptr);
3412 	MT_BUG_ON(mt, mas.index != 0x1000);
3413 	MT_BUG_ON(mt, mas.last != 0x1500);
3414 	MT_BUG_ON(mt, !mas_active(mas));
3415 
3416 	/* find: start ->active */
3417 	mas_set(&mas, 0);
3418 	entry = mas_find(&mas, ULONG_MAX);
3419 	MT_BUG_ON(mt, entry != ptr);
3420 	MT_BUG_ON(mt, mas.index != 0x1000);
3421 	MT_BUG_ON(mt, mas.last != 0x1500);
3422 	MT_BUG_ON(mt, !mas_active(mas));
3423 
3424 	/* find: pause ->active */
3425 	mas_set(&mas, 0);
3426 	mas_pause(&mas);
3427 	entry = mas_find(&mas, ULONG_MAX);
3428 	MT_BUG_ON(mt, entry != ptr);
3429 	MT_BUG_ON(mt, mas.index != 0x1000);
3430 	MT_BUG_ON(mt, mas.last != 0x1500);
3431 	MT_BUG_ON(mt, !mas_active(mas));
3432 
3433 	/* find: start -> active on value */
3434 	mas_set(&mas, 1200);
3435 	entry = mas_find(&mas, ULONG_MAX);
3436 	MT_BUG_ON(mt, entry != ptr);
3437 	MT_BUG_ON(mt, mas.index != 0x1000);
3438 	MT_BUG_ON(mt, mas.last != 0x1500);
3439 	MT_BUG_ON(mt, !mas_active(mas));
3440 
3441 	/* find:active ->active */
3442 	entry = mas_find(&mas, ULONG_MAX);
3443 	MT_BUG_ON(mt, entry != ptr2);
3444 	MT_BUG_ON(mt, mas.index != 0x2000);
3445 	MT_BUG_ON(mt, mas.last != 0x2500);
3446 	MT_BUG_ON(mt, !mas_active(mas));
3447 
3449 	/* find:active -> active (NULL) */
3450 	entry = mas_find(&mas, 0x2700);
3451 	MT_BUG_ON(mt, entry != NULL);
3452 	MT_BUG_ON(mt, mas.index != 0x2501);
3453 	MT_BUG_ON(mt, mas.last != 0x2FFF);
3454 	MT_BUG_ON(mt, !mas_active(mas));
3455 
3456 	/* find: overflow ->active */
3457 	entry = mas_find(&mas, 0x5000);
3458 	MT_BUG_ON(mt, entry != ptr3);
3459 	MT_BUG_ON(mt, mas.index != 0x3000);
3460 	MT_BUG_ON(mt, mas.last != 0x3500);
3461 	MT_BUG_ON(mt, !mas_active(mas));
3462 
3463 	/* find:active -> active (NULL) end */
3464 	entry = mas_find(&mas, ULONG_MAX);
3465 	MT_BUG_ON(mt, entry != NULL);
3466 	MT_BUG_ON(mt, mas.index != 0x3501);
3467 	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3468 	MT_BUG_ON(mt, !mas_active(mas));
3469 
3470 	/* find_rev: active (END) ->active */
3471 	entry = mas_find_rev(&mas, 0);
3472 	MT_BUG_ON(mt, entry != ptr3);
3473 	MT_BUG_ON(mt, mas.index != 0x3000);
3474 	MT_BUG_ON(mt, mas.last != 0x3500);
3475 	MT_BUG_ON(mt, !mas_active(mas));
3476 
3477 	/* find_rev:active ->active */
3478 	entry = mas_find_rev(&mas, 0);
3479 	MT_BUG_ON(mt, entry != ptr2);
3480 	MT_BUG_ON(mt, mas.index != 0x2000);
3481 	MT_BUG_ON(mt, mas.last != 0x2500);
3482 	MT_BUG_ON(mt, !mas_active(mas));
3483 
3484 	/* find_rev: pause ->active */
3485 	mas_pause(&mas);
3486 	entry = mas_find_rev(&mas, 0);
3487 	MT_BUG_ON(mt, entry != ptr);
3488 	MT_BUG_ON(mt, mas.index != 0x1000);
3489 	MT_BUG_ON(mt, mas.last != 0x1500);
3490 	MT_BUG_ON(mt, !mas_active(mas));
3491 
3492 	/* find_rev:active -> active */
3493 	entry = mas_find_rev(&mas, 0);
3494 	MT_BUG_ON(mt, entry != NULL);
3495 	MT_BUG_ON(mt, mas.index != 0);
3496 	MT_BUG_ON(mt, mas.last != 0x0FFF);
3497 	MT_BUG_ON(mt, !mas_active(mas));
3498 
	/* find_rev: start -> active */
	mas_set(&mas, 0x1200);
	entry = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_active(mas));

	/* mas_walk: start -> active */
	mas_set(&mas, 0x1200);
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_active(mas));

	/* mas_walk: start -> active (NULL) */
	mas_set(&mas, 0x1600);
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0x1501);
	MT_BUG_ON(mt, mas.last != 0x1fff);
	MT_BUG_ON(mt, !mas_active(mas));

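	/*
	 * Unlike find, mas_walk() does not advance: it positions the state
	 * on whatever range contains mas.index, returning the entry there
	 * or NULL for a hole, with mas.index/mas.last set to that range.
	 */
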
	/* mas_walk: pause -> active */
	mas_set(&mas, 0x1200);
	mas_pause(&mas);
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_active(mas));

	/* mas_walk: pause -> active (NULL) */
	mas_set(&mas, 0x1600);
	mas_pause(&mas);
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0x1501);
	MT_BUG_ON(mt, mas.last != 0x1fff);
	MT_BUG_ON(mt, !mas_active(mas));

	/* mas_walk: none -> active */
	mas_set(&mas, 0x1200);
	mas.node = MAS_NONE;
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_active(mas));

	/* mas_walk: none -> active (NULL) */
	mas_set(&mas, 0x1600);
	mas.node = MAS_NONE;
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0x1501);
	MT_BUG_ON(mt, mas.last != 0x1fff);
	MT_BUG_ON(mt, !mas_active(mas));

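	/*
	 * MAS_NONE marks a state with no node at all; forcing it here checks
	 * that mas_walk() recovers by re-walking from the root instead of
	 * dereferencing a missing node.
	 */
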
	/* mas_walk: active -> active */
	mas.index = 0x1200;
	mas.last = 0x1200;
	mas.offset = 0;
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != ptr);
	MT_BUG_ON(mt, mas.index != 0x1000);
	MT_BUG_ON(mt, mas.last != 0x1500);
	MT_BUG_ON(mt, !mas_active(mas));

	/* mas_walk: active -> active (NULL) */
	mas.index = 0x1600;
	mas.last = 0x1600;
	entry = mas_walk(&mas);
	MT_BUG_ON(mt, entry != NULL);
	MT_BUG_ON(mt, mas.index != 0x1501);
	MT_BUG_ON(mt, mas.last != 0x1fff);
	MT_BUG_ON(mt, !mas_active(mas));

	mas_unlock(&mas);
}

static DEFINE_MTREE(tree);
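
/*
 * maple_tree_seed() drives the whole suite at module init: each check_*()
 * helper is handed a freshly initialized tree and the tree is destroyed
 * afterwards, so one test cannot leak state into the next.
 */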
static int __init maple_tree_seed(void)
{
	unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
				1001, 1002, 1003, 1005, 0,
				5003, 5002};
	void *ptr = &set;

	pr_info("\nTEST STARTING\n\n");

#if defined(BENCH_SLOT_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_slot_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_NODE_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_node_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_AWALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_awalk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_WALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_walk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_FORK)
#define BENCH
	bench_forking();
	goto skip;
#endif
#if defined(BENCH_MT_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mt_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MAS_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mas_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MAS_PREV)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mas_prev(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
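
	/*
	 * Each BENCH_* block above defines BENCH and jumps to the skip
	 * label, so building with one BENCH_* macro enabled runs exactly one
	 * benchmark and skips the functional tests below.
	 */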

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_root_expand(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_iteration(&tree);
	mtree_destroy(&tree);

	check_forking();

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_mas_store_gfp(&tree);
	mtree_destroy(&tree);

	/* Test ranges (store and insert) */
	mt_init_flags(&tree, 0);
	check_ranges(&tree);
	mtree_destroy(&tree);

#if defined(CONFIG_64BIT)
	/* These tests have ranges outside of 4GB */
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_range(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_rev_range(&tree);
	mtree_destroy(&tree);
#endif
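
	/*
	 * The allocation-range tests are 64-bit only: indices above 4GB do
	 * not fit in a 32-bit unsigned long.
	 */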

	mt_init_flags(&tree, 0);

	check_load(&tree, set[0], NULL);        /* See if 5015 -> NULL */

	check_insert(&tree, set[9], &tree);     /* Insert 0 */
	check_load(&tree, set[9], &tree);       /* See if 0 -> &tree */
	check_load(&tree, set[0], NULL);        /* See if 5015 -> NULL */

	check_insert(&tree, set[10], ptr);      /* Insert 5003 */
	check_load(&tree, set[9], &tree);       /* See if 0 -> &tree */
	check_load(&tree, set[11], NULL);       /* See if 5002 -> NULL */
	check_load(&tree, set[10], ptr);        /* See if 5003 -> ptr */

	/* Clear out the tree */
	mtree_destroy(&tree);

	/* Try to insert, insert a dup, and load back what was inserted. */
	mt_init_flags(&tree, 0);
	check_insert(&tree, set[0], &tree);     /* Insert 5015 */
	check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */

	/*
	 * The second set of tests loads a value that doesn't exist, inserts
	 * a second value, then loads both values again.
	 */
	check_load(&tree, set[1], NULL);        /* See if 5014 -> NULL */
	check_insert(&tree, set[1], ptr);       /* Insert 5014 -> ptr */
	check_load(&tree, set[1], ptr);         /* See if 5014 -> ptr */
	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
	/*
	 * Tree currently contains:
	 * [0, 5013] -> (nil), 5014 -> ptr, 5015 -> &tree, [5016, ...] -> (nil)
	 */
	check_insert(&tree, set[6], ptr);       /* Insert 1002 -> ptr */
	check_insert(&tree, set[7], &tree);     /* Insert 1003 -> &tree */

	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
	check_load(&tree, set[1], ptr);         /* See if 5014 -> ptr */
	check_load(&tree, set[6], ptr);         /* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);       /* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/* Test inserting into a NULL hole. */
	check_insert(&tree, set[5], ptr);       /* Insert 1001 -> ptr */
	check_insert(&tree, set[7], &tree);     /* Insert 1003 -> &tree */
	check_insert(&tree, set[6], ptr);       /* Insert 1002 -> ptr */
	check_load(&tree, set[5], ptr);         /* See if 1001 -> ptr */
	check_load(&tree, set[6], ptr);         /* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);       /* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/*
	 *       set[] = {5015, 5014, 5017, 25, 1000,
	 *                1001, 1002, 1003, 1005, 0,
	 *                5003, 5002};
	 */

	check_insert(&tree, set[0], ptr);   /* 5015 */
	check_insert(&tree, set[1], &tree); /* 5014 */
	check_insert(&tree, set[2], ptr);   /* 5017 */
	check_insert(&tree, set[3], &tree); /* 25 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_insert(&tree, set[4], ptr);   /* 1000 < Should split. */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);   /* 25 */
	check_load(&tree, set[4], ptr);
	check_insert(&tree, set[5], &tree); /* 1001 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_insert(&tree, set[6], ptr);   /* 1002 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_insert(&tree, set[7], &tree); /* 1003 */
	check_load(&tree, set[0], ptr);
	check_insert(&tree, set[8], ptr);   /* 1005 */

	check_insert(&tree, set[9], &tree); /* 0 */

	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_load(&tree, set[9], &tree);
	mtree_destroy(&tree);
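
	/*
	 * The load-after-every-insert pattern above is deliberate: each
	 * insertion that can force a node split is followed by reloading all
	 * earlier indices to verify nothing was lost in the rebalance.
	 */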

	mt_init_flags(&tree, 0);
	check_seq(&tree, 16, false);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	check_lower_bound_split(&tree);
	check_upper_bound_split(&tree);
	check_mid_split(&tree);

	mt_init_flags(&tree, 0);
	check_next_entry(&tree);
	check_find(&tree);
	check_find_2(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_prev_entry(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_gap_combining(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_node_overwrite(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	next_prev_test(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_spanning_relatives(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_find(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_fuzzer(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_dup(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_bnode_min_spanning(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_window(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_fill(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_state_handling(&tree);
	mtree_destroy(&tree);

#if defined(BENCH)
skip:
#endif
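	/*
	 * rcu_barrier() waits for outstanding RCU callbacks, so nodes freed
	 * by the tests are actually reclaimed before results are reported.
	 */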
	rcu_barrier();
	pr_info("maple_tree: %u of %u tests passed\n",
			atomic_read(&maple_tree_tests_passed),
			atomic_read(&maple_tree_tests_run));
	if (atomic_read(&maple_tree_tests_run) ==
	    atomic_read(&maple_tree_tests_passed))
		return 0;

	return -EINVAL;
}

static void __exit maple_tree_harvest(void)
{
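	/* Nothing to tear down: each test destroys its own tree. */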
}

module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
MODULE_LICENSE("GPL");