1 // SPDX-License-Identifier: GPL-2.0+
3 * test_maple_tree.c: Test the maple tree API
4 * Copyright (c) 2018-2022 Oracle Corporation
5 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
7 * Any tests that only require the interface of the tree.
10 #include <linux/maple_tree.h>
11 #include <linux/module.h>
12 #include <linux/rwsem.h>
/* Allocation ceiling used by the allocator-range tests below. */
14 #define MTREE_ALLOC_MAX 0x2000000000000Ul
/* Enable the search-based test variants for this build. */
15 #define CONFIG_MAPLE_SEARCH
/* NOTE(review): presumably true when node slot count indicates a 32-bit layout — confirm against maple_tree.h. */
16 #define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)
18 #ifndef CONFIG_DEBUG_MAPLE_TREE
/*
 * Without CONFIG_DEBUG_MAPLE_TREE the dump/validate/shrink hooks compile
 * away to no-ops, and the test counters plus MT_BUG_ON() are defined here.
 */
19 #define mt_dump(mt, fmt) do {} while (0)
20 #define mt_validate(mt) do {} while (0)
21 #define mt_cache_shrink() do {} while (0)
22 #define mas_dump(mas) do {} while (0)
23 #define mas_wr_dump(mas) do {} while (0)
/* Counters reported on failure: total checks run vs. checks passed. */
24 atomic_t maple_tree_tests_run;
25 atomic_t maple_tree_tests_passed;
/*
 * MT_BUG_ON() - run one check: bump the run counter, log failing checks
 * with function/line context, and bump the pass counter on success.
 * NOTE(review): the if/else branching of this macro body is not visible
 * in this chunk — the text below appears truncated.
 */
28 #define MT_BUG_ON(__tree, __x) do { \
29 atomic_inc(&maple_tree_tests_run); \
31 pr_info("BUG at %s:%d (%u)\n", \
32 __func__, __LINE__, __x); \
33 pr_info("Pass: %u Run:%u\n", \
34 atomic_read(&maple_tree_tests_passed), \
35 atomic_read(&maple_tree_tests_run)); \
37 atomic_inc(&maple_tree_tests_passed); \
42 /* #define BENCH_SLOT_STORE */
43 /* #define BENCH_NODE_STORE */
44 /* #define BENCH_AWALK */
45 /* #define BENCH_WALK */
46 /* #define BENCH_MT_FOR_EACH */
47 /* #define BENCH_FORK */
48 /* #define BENCH_MAS_FOR_EACH */
49 /* #define BENCH_MAS_PREV */
/* Shims: these hooks only do real work in the userspace test harness. */
52 #define mt_set_non_kernel(x) do {} while (0)
53 #define mt_zero_nr_tallocated(x) do {} while (0)
55 #define cond_resched() do {} while (0)
57 static int __init mtree_insert_index(struct maple_tree *mt,
58 unsigned long index, gfp_t gfp)
60 return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
63 static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
65 MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
66 MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
69 static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
72 return mtree_insert(mt, index, ptr, GFP_KERNEL);
75 static int __init mtree_test_store_range(struct maple_tree *mt,
76 unsigned long start, unsigned long end, void *ptr)
78 return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
81 static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
84 return mtree_test_store_range(mt, start, start, ptr);
87 static int __init mtree_test_insert_range(struct maple_tree *mt,
88 unsigned long start, unsigned long end, void *ptr)
90 return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
93 static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
95 return mtree_load(mt, index);
98 static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
100 return mtree_erase(mt, index);
103 #if defined(CONFIG_64BIT)
104 static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
105 unsigned long start, unsigned long end, unsigned long size,
106 unsigned long expected, int eret, void *ptr)
109 unsigned long result = expected + 1;
112 ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
114 MT_BUG_ON(mt, ret != eret);
118 MT_BUG_ON(mt, result != expected);
121 static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
122 unsigned long start, unsigned long end, unsigned long size,
123 unsigned long expected, int eret, void *ptr)
126 unsigned long result = expected + 1;
129 ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
131 MT_BUG_ON(mt, ret != eret);
135 MT_BUG_ON(mt, result != expected);
139 static noinline void __init check_load(struct maple_tree *mt,
140 unsigned long index, void *ptr)
142 void *ret = mtree_test_load(mt, index);
145 pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
146 MT_BUG_ON(mt, ret != ptr);
149 static noinline void __init check_store_range(struct maple_tree *mt,
150 unsigned long start, unsigned long end, void *ptr, int expected)
155 ret = mtree_test_store_range(mt, start, end, ptr);
156 MT_BUG_ON(mt, ret != expected);
161 for (i = start; i <= end; i++)
162 check_load(mt, i, ptr);
165 static noinline void __init check_insert_range(struct maple_tree *mt,
166 unsigned long start, unsigned long end, void *ptr, int expected)
171 ret = mtree_test_insert_range(mt, start, end, ptr);
172 MT_BUG_ON(mt, ret != expected);
177 for (i = start; i <= end; i++)
178 check_load(mt, i, ptr);
181 static noinline void __init check_insert(struct maple_tree *mt,
182 unsigned long index, void *ptr)
186 ret = mtree_test_insert(mt, index, ptr);
187 MT_BUG_ON(mt, ret != 0);
190 static noinline void __init check_dup_insert(struct maple_tree *mt,
191 unsigned long index, void *ptr)
195 ret = mtree_test_insert(mt, index, ptr);
196 MT_BUG_ON(mt, ret != -EEXIST);
200 static noinline void __init check_index_load(struct maple_tree *mt,
203 return check_load(mt, index, xa_mk_value(index & LONG_MAX));
206 static inline __init int not_empty(struct maple_node *node)
/*
 * not_empty() - scan @node's slots for any populated entry.
 * NOTE(review): the body is truncated in this chunk — the slot test inside
 * the loop and the return statements are not visible here.
 */
213 for (i = 0; i < ARRAY_SIZE(node->slot); i++)
/*
 * check_rev_seq() - populate the tree by inserting indices from @max down,
 * re-checking every already-inserted index after each insert, then
 * optionally dumping the tree and allocation statistics.
 * NOTE(review): truncated in this chunk — the enclosing insertion loop and
 * the verbose guard are not fully visible.
 */
221 static noinline void __init check_rev_seq(struct maple_tree *mt,
222 unsigned long max, bool verbose)
224 unsigned long i = max, j;
226 MT_BUG_ON(mt, !mtree_empty(mt));
228 mt_zero_nr_tallocated();
230 MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
/* Everything inserted so far (i..max) must still load back correctly. */
231 for (j = i; j <= max; j++)
232 check_index_load(mt, j);
/* Nothing below the lowest inserted index may be present. */
234 check_load(mt, i - 1, NULL);
236 MT_BUG_ON(mt, !mt_height(mt));
238 MT_BUG_ON(mt, !mt_height(mt));
241 check_load(mt, max + 1, NULL);
246 mt_dump(mt, mt_dump_dec);
247 pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
248 __func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
/*
 * check_seq() - insert indices 0..@max in ascending order, re-checking all
 * prior indices after each insert; optionally dump stats at the end.
 * NOTE(review): truncated in this chunk — declarations and the verbose
 * guard are not fully visible.
 */
254 static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
259 MT_BUG_ON(mt, !mtree_empty(mt));
261 mt_zero_nr_tallocated();
262 for (i = 0; i <= max; i++) {
263 MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
/* All indices inserted so far must still load back. */
264 for (j = 0; j <= i; j++)
265 check_index_load(mt, j);
268 MT_BUG_ON(mt, !mt_height(mt));
/* The next, not-yet-inserted index must be absent. */
269 check_load(mt, i + 1, NULL);
275 mt_dump(mt, mt_dump_dec);
276 pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
277 max, mt_get_alloc_size()/1024, mt_nr_allocated(),
/*
 * check_lb_not_empty() - insert entries at power-of-two-stepped indices up
 * to ~4G, verifying each inserted index loads its own value while both of
 * its immediate neighbours stay absent.
 * NOTE(review): the insertion loop header is not visible in this chunk.
 */
283 static noinline void __init check_lb_not_empty(struct maple_tree *mt)
286 unsigned long huge = 4000UL * 1000 * 1000;
291 check_insert(mt, i, (void *) i);
292 for (j = huge; j >= i; j /= 2) {
293 check_load(mt, j-1, NULL)_
294 check_load(mt, j, (void *) j);
295 check_load(mt, j+1, NULL);
302 static noinline void __init check_lower_bound_split(struct maple_tree *mt)
304 MT_BUG_ON(mt, !mtree_empty(mt));
305 check_lb_not_empty(mt);
/*
 * check_upper_bound_split() - mirror of the lower-bound test: walk upward
 * (doubling j) and verify only the inserted indices are present.
 * NOTE(review): local declarations and the insertion loop header are
 * truncated in this chunk.
 */
308 static noinline void __init check_upper_bound_split(struct maple_tree *mt)
313 MT_BUG_ON(mt, !mtree_empty(mt));
318 huge = 4000UL * 1000 * 1000;
322 check_insert(mt, i, (void *) i);
323 for (j = i; j >= huge; j *= 2) {
324 check_load(mt, j-1, NULL);
325 check_load(mt, j, (void *) j);
326 check_load(mt, j+1, NULL);
333 static noinline void __init check_mid_split(struct maple_tree *mt)
335 unsigned long huge = 8000UL * 1000 * 1000;
337 check_insert(mt, huge, (void *) huge);
338 check_insert(mt, 0, xa_mk_value(0));
339 check_lb_not_empty(mt);
/*
 * check_rev_find() - store ranges i*10..i*10+5 for i = 0..200, then walk
 * backwards with mas_find_rev() from several start points, checking both
 * the entries found and the NULL results once a limit is passed.
 * NOTE(review): the mas_set() seeding between the phases below is not
 * visible in this chunk.
 */
342 static noinline void __init check_rev_find(struct maple_tree *mt)
344 int i, nr_entries = 200;
346 MA_STATE(mas, mt, 0, 0);
348 for (i = 0; i <= nr_entries; i++)
349 mtree_store_range(mt, i*10, i*10 + 5,
350 xa_mk_value(i), GFP_KERNEL);
/* First reverse hit is entry 100; a second search above 1000 finds none. */
354 val = mas_find_rev(&mas, 1000);
355 MT_BUG_ON(mt, val != xa_mk_value(100));
356 val = mas_find_rev(&mas, 1000);
357 MT_BUG_ON(mt, val != NULL);
360 val = mas_find_rev(&mas, 997);
361 MT_BUG_ON(mt, val != NULL);
364 val = mas_find_rev(&mas, 900);
365 MT_BUG_ON(mt, val != xa_mk_value(100));
366 val = mas_find_rev(&mas, 900);
367 MT_BUG_ON(mt, val != xa_mk_value(99));
/* Walk all the way down to entry 0, then NULL once exhausted. */
370 val = mas_find_rev(&mas, 0);
371 MT_BUG_ON(mt, val != xa_mk_value(2));
372 val = mas_find_rev(&mas, 0);
373 MT_BUG_ON(mt, val != xa_mk_value(1));
374 val = mas_find_rev(&mas, 0);
375 MT_BUG_ON(mt, val != xa_mk_value(0));
376 val = mas_find_rev(&mas, 0);
377 MT_BUG_ON(mt, val != NULL);
/*
 * check_find() - exercise forward iteration (mas_find/mas_for_each/
 * mt_for_each) over a tree of value entries interleaved with
 * XA_ZERO_ENTRYs, plus ULONG_MAX handling and the mas_prev() path after
 * erasing the last entry.
 * NOTE(review): heavily truncated in this chunk — loop bodies, locking and
 * several mas_set()/mas_reset() calls are not visible.
 */
381 static noinline void __init check_find(struct maple_tree *mt)
383 unsigned long val = 0;
387 unsigned long last = 0, index = 0;
388 void *entry, *entry2;
390 MA_STATE(mas, mt, 0, 0);
393 MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));
395 #if defined(CONFIG_64BIT)
396 top = 4398046511104UL;
/* Populate alternating value entries and XA_ZERO_ENTRYs. */
407 for (int i = 0; i <= count; i++) {
409 MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
411 MT_BUG_ON(mt, mtree_insert(mt, val,
412 XA_ZERO_ENTRY, GFP_KERNEL));
420 while ((entry = mas_find(&mas, 268435456)) != NULL) {
422 MT_BUG_ON(mt, xa_mk_value(val) != entry);
424 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
427 /* For zero check. */
436 mas_for_each(&mas, entry, ULONG_MAX) {
438 MT_BUG_ON(mt, xa_mk_value(val) != entry);
440 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
442 /* For zero check. */
452 mas_for_each(&mas, entry, ULONG_MAX) {
454 MT_BUG_ON(mt, xa_mk_value(val) != entry);
456 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
458 /* For zero check. */
469 max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
470 mt_for_each(mt, entry, index, max) {
471 MT_BUG_ON(mt, xa_mk_value(val) != entry);
473 if (val == 64) /* Skip zero entry. */
475 /* For zero check. */
483 MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
484 mt_for_each(mt, entry, index, ULONG_MAX) {
486 MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
488 MT_BUG_ON(mt, xa_mk_value(val) != entry);
490 /* Workaround for 32bit */
491 if ((val << 2) < val)
496 if (val == 64) /* Skip zero entry. */
498 /* For zero check. */
502 MT_BUG_ON(mt, max > 25);
504 mtree_erase_index(mt, ULONG_MAX);
508 entry = mt_find(mt, &index, 512);
509 MT_BUG_ON(mt, xa_mk_value(256) != entry);
513 entry = mt_find(mt, &index, 20);
514 MT_BUG_ON(mt, entry != NULL);
518 /* Insert ULONG_MAX */
519 MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
524 mas_for_each(&mas, entry, ULONG_MAX) {
526 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
528 MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
530 MT_BUG_ON(mt, xa_mk_value(val) != entry);
532 /* Workaround for 32bit */
533 if ((val << 2) < val)
538 /* For zero check. */
547 mas_set(&mas, 1048576);
549 entry = mas_find(&mas, 1048576);
551 MT_BUG_ON(mas.tree, entry == NULL);
555 * 1. get the expected value, leveraging the existence of an end entry
556 * 2. delete end entry
557 * 3. find the last value but searching for ULONG_MAX and then using
560 /* First, get the expected result. */
563 mas.index = ULONG_MAX; /* start at max.. */
564 entry = mas_find(&mas, ULONG_MAX);
565 entry = mas_prev(&mas, 0);
569 /* Erase the last entry. */
571 mas.index = ULONG_MAX;
572 mas.last = ULONG_MAX;
575 /* Get the previous value from MAS_START */
577 entry2 = mas_prev(&mas, 0);
/* The mas_prev() from MAS_START must agree with the recorded expectation. */
580 MT_BUG_ON(mt, entry != entry2);
581 MT_BUG_ON(mt, index != mas.index);
582 MT_BUG_ON(mt, last != mas.last);
586 mas.index = ULONG_MAX;
587 mas.last = ULONG_MAX;
588 entry2 = mas_prev(&mas, 0);
589 MT_BUG_ON(mt, entry != entry2);
592 MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);
/*
 * check_find_2() - iterate an empty tree (expect no entries), then insert
 * 0..255 checking the iteration count after each insert, then erase them
 * again checking the remaining-entry count as the tree shrinks.
 * NOTE(review): truncated — declarations, mas_set() calls and loop-closing
 * braces are not visible in this chunk.
 */
598 static noinline void __init check_find_2(struct maple_tree *mt)
603 MA_STATE(mas, mt, 0, 0);
605 mas_for_each(&mas, entry, ULONG_MAX)
609 for (i = 0; i < 256; i++) {
610 mtree_insert_index(mt, i, GFP_KERNEL);
614 mas_for_each(&mas, entry, ULONG_MAX) {
615 MT_BUG_ON(mt, entry != xa_mk_value(j));
/* After inserting index i, exactly i + 1 entries must be iterable. */
619 MT_BUG_ON(mt, j != i + 1);
622 for (i = 0; i < 256; i++) {
623 mtree_erase_index(mt, i);
627 mas_for_each(&mas, entry, ULONG_MAX) {
628 if (xa_is_zero(entry))
631 MT_BUG_ON(mt, entry != xa_mk_value(j));
635 MT_BUG_ON(mt, j != 256);
638 /*MT_BUG_ON(mt, !mtree_empty(mt)); */
642 #if defined(CONFIG_64BIT)
/*
 * check_alloc_rev_range() - build a tree from a captured /proc/self/maps
 * layout (page-shifted), then verify mas_empty_area_rev() finds each known
 * hole and mtree_alloc_rrange() honours min/max/size for a table of
 * requests.
 * NOTE(review): truncated in this chunk — locking, loop-closing braces and
 * several request-table rows are not visible.
 */
643 static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
647 * cat /proc/self/maps | awk '{print $1}'|
648 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
651 static const unsigned long range[] = {
652 /* Inclusive , Exclusive. */
653 0x565234af2000, 0x565234af4000,
654 0x565234af4000, 0x565234af9000,
655 0x565234af9000, 0x565234afb000,
656 0x565234afc000, 0x565234afd000,
657 0x565234afd000, 0x565234afe000,
658 0x565235def000, 0x565235e10000,
659 0x7f36d4bfd000, 0x7f36d4ee2000,
660 0x7f36d4ee2000, 0x7f36d4f04000,
661 0x7f36d4f04000, 0x7f36d504c000,
662 0x7f36d504c000, 0x7f36d5098000,
663 0x7f36d5098000, 0x7f36d5099000,
664 0x7f36d5099000, 0x7f36d509d000,
665 0x7f36d509d000, 0x7f36d509f000,
666 0x7f36d509f000, 0x7f36d50a5000,
667 0x7f36d50b9000, 0x7f36d50db000,
668 0x7f36d50db000, 0x7f36d50dc000,
669 0x7f36d50dc000, 0x7f36d50fa000,
670 0x7f36d50fa000, 0x7f36d5102000,
671 0x7f36d5102000, 0x7f36d5103000,
672 0x7f36d5103000, 0x7f36d5104000,
673 0x7f36d5104000, 0x7f36d5105000,
674 0x7fff5876b000, 0x7fff5878d000,
675 0x7fff5878e000, 0x7fff58791000,
676 0x7fff58791000, 0x7fff58793000,
679 static const unsigned long holes[] = {
681 * Note: start of hole is INCLUSIVE
682 * end of hole is EXCLUSIVE
683 * (opposite of the above table.)
684 * Start of hole, end of hole, size of hole (+1)
686 0x565234afb000, 0x565234afc000, 0x1000,
687 0x565234afe000, 0x565235def000, 0x12F1000,
688 0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
692 * req_range consists of 4 values.
696 * 4. number that should be returned.
699 static const unsigned long req_range[] = {
700 0x565234af9000, /* Min */
701 0x7fff58791000, /* Max */
703 0x7fff5878d << 12, /* First rev hole of size 0x1000 */
704 0, /* Return value success. */
707 0x565234AF0 << 12, /* Max */
709 0x565234AEE << 12, /* max - 3. */
710 0, /* Return value success. */
715 562949953421311 << 12,/* First rev hole of size 0x1000 */
716 0, /* Return value success. */
719 0x7F36D5109 << 12, /* Max */
721 0x7F36D5106 << 12, /* First rev hole of size 0x4000 */
722 0, /* Return value success. */
733 18446744073709551615UL,
734 562915594369134UL << 12,
738 /* Single space test. */
746 int i, range_count = ARRAY_SIZE(range);
747 int req_range_count = ARRAY_SIZE(req_range);
748 unsigned long min = 0;
750 MA_STATE(mas, mt, 0, 0);
/* Cap the usable space at MTREE_ALLOC_MAX with a zero-entry sentinel. */
752 mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
754 #define DEBUG_REV_RANGE 0
755 for (i = 0; i < range_count; i += 2) {
756 /* Inclusive, Inclusive (with the -1) */
759 pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
760 (range[i + 1] >> 12) - 1);
762 check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
763 xa_mk_value(range[i] >> 12), 0);
/* Each known hole must be found exactly by the reverse empty-area search. */
769 for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
771 pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
772 min, holes[i+1]>>12, holes[i+2]>>12,
775 MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
779 pr_debug("Found %lu %lu\n", mas.index, mas.last);
780 pr_debug("gap %lu %lu\n", (holes[i] >> 12),
783 MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
784 MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
785 min = holes[i+1] >> 12;
790 for (i = 0; i < req_range_count; i += 5) {
792 pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
793 i, req_range[i] >> 12,
794 (req_range[i + 1] >> 12),
795 req_range[i+2] >> 12,
796 req_range[i+3] >> 12);
798 check_mtree_alloc_rrange(mt,
799 req_range[i] >> 12, /* start */
800 req_range[i+1] >> 12, /* end */
801 req_range[i+2] >> 12, /* size */
802 req_range[i+3] >> 12, /* expected address */
803 req_range[i+4], /* expected return */
804 xa_mk_value(req_range[i] >> 12)); /* pointer */
808 mt_set_non_kernel(1);
809 mtree_erase(mt, 34148798727); /* create a deleted range. */
810 mtree_erase(mt, 34148798725);
811 check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
/*
 * check_alloc_range() - forward-allocation counterpart of the reverse test:
 * build the same captured memory layout, verify mas_empty_area() locates
 * each known hole bottom-up, then run a table of mtree_alloc_range()
 * requests including expected -EBUSY failures.
 * NOTE(review): truncated in this chunk — locking, loop-closing braces and
 * several request-table rows are not visible.
 */
817 static noinline void __init check_alloc_range(struct maple_tree *mt)
821 * cat /proc/self/maps|awk '{print $1}'|
822 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
825 static const unsigned long range[] = {
826 /* Inclusive , Exclusive. */
827 0x565234af2000, 0x565234af4000,
828 0x565234af4000, 0x565234af9000,
829 0x565234af9000, 0x565234afb000,
830 0x565234afc000, 0x565234afd000,
831 0x565234afd000, 0x565234afe000,
832 0x565235def000, 0x565235e10000,
833 0x7f36d4bfd000, 0x7f36d4ee2000,
834 0x7f36d4ee2000, 0x7f36d4f04000,
835 0x7f36d4f04000, 0x7f36d504c000,
836 0x7f36d504c000, 0x7f36d5098000,
837 0x7f36d5098000, 0x7f36d5099000,
838 0x7f36d5099000, 0x7f36d509d000,
839 0x7f36d509d000, 0x7f36d509f000,
840 0x7f36d509f000, 0x7f36d50a5000,
841 0x7f36d50b9000, 0x7f36d50db000,
842 0x7f36d50db000, 0x7f36d50dc000,
843 0x7f36d50dc000, 0x7f36d50fa000,
844 0x7f36d50fa000, 0x7f36d5102000,
845 0x7f36d5102000, 0x7f36d5103000,
846 0x7f36d5103000, 0x7f36d5104000,
847 0x7f36d5104000, 0x7f36d5105000,
848 0x7fff5876b000, 0x7fff5878d000,
849 0x7fff5878e000, 0x7fff58791000,
850 0x7fff58791000, 0x7fff58793000,
852 static const unsigned long holes[] = {
853 /* Start of hole, end of hole, size of hole (+1) */
854 0x565234afb000, 0x565234afc000, 0x1000,
855 0x565234afe000, 0x565235def000, 0x12F1000,
856 0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
860 * req_range consists of 4 values.
864 * 4. number that should be returned.
867 static const unsigned long req_range[] = {
868 0x565234af9000, /* Min */
869 0x7fff58791000, /* Max */
871 0x565234afb000, /* First hole in our data of size 1000. */
872 0, /* Return value success. */
875 0x7fff58791000, /* Max */
877 0x0, /* First hole in our data of size 2000. */
878 0, /* Return value success. */
881 34148797436 << 12, /* Min */
882 0x7fff587AF000, /* Max */
884 34148798629 << 12, /* Expected location */
885 0, /* Return value success. */
888 34148798623 << 12, /* Min */
889 34148798683 << 12, /* Max */
891 0, /* Expected location */
892 -EBUSY, /* Return value failed. */
894 /* Test filling entire gap. */
895 34148798623 << 12, /* Min */
896 0x7fff587AF000, /* Max */
898 34148798632 << 12, /* Expected location */
899 0, /* Return value success. */
901 /* Test walking off the end of root. */
905 0, /* Expected location */
906 -EBUSY, /* Return value failure. */
908 /* Test looking for too large a hole across entire range. */
911 4503599618982063UL << 12, /* Size */
912 34359052178 << 12, /* Expected location */
913 -EBUSY, /* Return failure. */
915 /* Test a single entry */
916 34148798648 << 12, /* Min */
917 34148798648 << 12, /* Max */
918 4096, /* Size of 1 */
919 34148798648 << 12, /* Location is the same as min/max */
922 int i, range_count = ARRAY_SIZE(range);
923 int req_range_count = ARRAY_SIZE(req_range);
924 unsigned long min = 0x565234af2000;
925 MA_STATE(mas, mt, 0, 0);
/* Cap the usable space at MTREE_ALLOC_MAX with a zero-entry sentinel. */
927 mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
929 for (i = 0; i < range_count; i += 2) {
930 #define DEBUG_ALLOC_RANGE 0
931 #if DEBUG_ALLOC_RANGE
932 pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
933 (range[i + 1] >> 12) - 1);
934 mt_dump(mt, mt_dump_hex);
936 check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
937 xa_mk_value(range[i] >> 12), 0);
/* Each known hole must be found exactly by the forward empty-area search. */
944 for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
946 #if DEBUG_ALLOC_RANGE
947 pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
948 holes[i+1] >> 12, holes[i+2] >> 12,
951 MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
954 MT_BUG_ON(mt, mas.index != holes[i] >> 12);
959 for (i = 0; i < req_range_count; i += 5) {
960 #if DEBUG_ALLOC_RANGE
961 pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
962 i/5, req_range[i] >> 12, req_range[i + 1] >> 12,
963 req_range[i + 2] >> 12, req_range[i + 3] >> 12,
964 req_range[i], req_range[i+1]);
966 check_mtree_alloc_range(mt,
967 req_range[i] >> 12, /* start */
968 req_range[i+1] >> 12, /* end */
969 req_range[i+2] >> 12, /* size */
970 req_range[i+3] >> 12, /* expected address */
971 req_range[i+4], /* expected return */
972 xa_mk_value(req_range[i] >> 12)); /* pointer */
974 #if DEBUG_ALLOC_RANGE
975 mt_dump(mt, mt_dump_hex)_
/*
 * check_ranges() - large grab-bag of range store/overwrite scenarios:
 * overlapping inserts, overwrites that span node boundaries and levels,
 * in-place appends/expansions, rebalance-gap handling, and forced 3-way
 * splits all the way up the tree.
 * NOTE(review): heavily truncated in this chunk — the r[] table contents,
 * tree resets (mtree_destroy/mt_init_flags), val/val2 computations and
 * loop-closing braces are largely not visible.
 */
983 static noinline void __init check_ranges(struct maple_tree *mt)
986 static const unsigned long r[] = {
989 17, 22, /* Overlaps previous range. */
996 MT_BUG_ON(mt, !mtree_empty(mt));
997 check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
998 check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
/* Inserting an overlapping range must fail with -EEXIST... */
999 check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
1000 MT_BUG_ON(mt, !mt_height(mt));
/* ...but storing over the same overlap is allowed. */
1002 check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
1003 check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
1004 check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
1005 MT_BUG_ON(mt, !mt_height(mt));
1007 MT_BUG_ON(mt, mt_height(mt));
1009 check_seq(mt, 50, false);
1010 mt_set_non_kernel(4);
1011 check_store_range(mt, 5, 47, xa_mk_value(47), 0);
1012 MT_BUG_ON(mt, !mt_height(mt));
1015 /* Create tree of 1-100 */
1016 check_seq(mt, 100, false);
1018 mt_set_non_kernel(10);
1019 check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1020 MT_BUG_ON(mt, !mt_height(mt));
1023 /* Create tree of 1-200 */
1024 check_seq(mt, 200, false);
1026 check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1027 MT_BUG_ON(mt, !mt_height(mt));
1030 check_seq(mt, 30, false);
1031 check_store_range(mt, 6, 18, xa_mk_value(6), 0);
1032 MT_BUG_ON(mt, !mt_height(mt));
1035 /* Overwrite across multiple levels. */
1036 /* Create tree of 1-400 */
1037 check_seq(mt, 400, false);
1038 mt_set_non_kernel(50);
1040 check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
1041 mt_set_non_kernel(50);
1042 mtree_test_erase(mt, 140);
1043 mtree_test_erase(mt, 141);
1044 mtree_test_erase(mt, 142);
1045 mtree_test_erase(mt, 143);
1046 mtree_test_erase(mt, 130);
1047 mtree_test_erase(mt, 131);
1048 mtree_test_erase(mt, 132);
1049 mtree_test_erase(mt, 133);
1050 mtree_test_erase(mt, 134);
1051 mtree_test_erase(mt, 135);
1052 check_load(mt, r[12], xa_mk_value(r[12]));
1053 check_load(mt, r[13], xa_mk_value(r[12]));
1054 check_load(mt, r[13] - 1, xa_mk_value(r[12]));
1055 check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
1056 check_load(mt, 135, NULL);
1057 check_load(mt, 140, NULL);
1058 mt_set_non_kernel(0);
1059 MT_BUG_ON(mt, !mt_height(mt));
1064 /* Overwrite multiple levels at the end of the tree (slot 7) */
1065 mt_set_non_kernel(50);
1066 check_seq(mt, 400, false);
1067 check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1068 check_store_range(mt, 347, 352, xa_mk_value(347), 0);
1070 check_load(mt, 346, xa_mk_value(346));
1071 for (i = 347; i <= 352; i++)
1072 check_load(mt, i, xa_mk_value(347));
1073 for (i = 353; i <= 361; i++)
1074 check_load(mt, i, xa_mk_value(353));
1075 check_load(mt, 362, xa_mk_value(362));
1076 mt_set_non_kernel(0);
1077 MT_BUG_ON(mt, !mt_height(mt));
1080 mt_set_non_kernel(50);
1081 check_seq(mt, 400, false);
1082 check_store_range(mt, 352, 364, NULL, 0);
1083 check_store_range(mt, 351, 363, xa_mk_value(352), 0);
1084 check_load(mt, 350, xa_mk_value(350));
1085 check_load(mt, 351, xa_mk_value(352));
1086 for (i = 352; i <= 363; i++)
1087 check_load(mt, i, xa_mk_value(352));
1088 check_load(mt, 364, NULL);
1089 check_load(mt, 365, xa_mk_value(365));
1090 mt_set_non_kernel(0);
1091 MT_BUG_ON(mt, !mt_height(mt));
1094 mt_set_non_kernel(5);
1095 check_seq(mt, 400, false);
1096 check_store_range(mt, 352, 364, NULL, 0);
1097 check_store_range(mt, 351, 364, xa_mk_value(352), 0);
1098 check_load(mt, 350, xa_mk_value(350));
1099 check_load(mt, 351, xa_mk_value(352));
1100 for (i = 352; i <= 364; i++)
1101 check_load(mt, i, xa_mk_value(352));
1102 check_load(mt, 365, xa_mk_value(365));
1103 mt_set_non_kernel(0);
1104 MT_BUG_ON(mt, !mt_height(mt));
1108 mt_set_non_kernel(50);
1109 check_seq(mt, 400, false);
1110 check_store_range(mt, 362, 367, xa_mk_value(362), 0);
1111 check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1112 mt_set_non_kernel(0);
1114 MT_BUG_ON(mt, !mt_height(mt));
1117 * Interesting cases:
1118 * 1. Overwrite the end of a node and end in the first entry of the next
1120 * 2. Split a single range
1121 * 3. Overwrite the start of a range
1122 * 4. Overwrite the end of a range
1123 * 5. Overwrite the entire range
1124 * 6. Overwrite a range that causes multiple parent nodes to be
1126 * 7. Overwrite a range that causes multiple parent nodes and part of
1127 * root to be combined
1128 * 8. Overwrite the whole tree
1129 * 9. Try to overwrite the zero entry of an alloc tree.
1130 * 10. Write a range larger than a nodes current pivot
1133 mt_set_non_kernel(50);
1134 for (i = 0; i <= 500; i++) {
1137 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1139 check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
1140 check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
1141 check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
1142 check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
1143 check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
1145 mt_set_non_kernel(0);
1147 mt_set_non_kernel(50);
1148 for (i = 0; i <= 500; i++) {
1151 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1153 check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
1154 check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
1155 check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
1156 check_store_range(mt, 2460, 2470, NULL, 0);
1157 check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
1158 check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
1159 mt_set_non_kernel(0);
1160 MT_BUG_ON(mt, !mt_height(mt));
1163 /* Check in-place modifications */
1164 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1165 /* Append to the start of last range */
1166 mt_set_non_kernel(50);
1167 for (i = 0; i <= 500; i++) {
1170 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1173 /* Append to the last range without touching any boundaries */
1174 for (i = 0; i < 10; i++) {
1177 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1180 /* Append to the end of last range */
1182 for (i = 0; i < 10; i++) {
1184 MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
1185 xa_mk_value(val)) != 0);
1188 /* Overwriting the range and over a part of the next range */
1189 for (i = 10; i < 30; i += 2) {
1192 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1195 /* Overwriting a part of the range and over the next range */
1196 for (i = 50; i < 70; i += 2) {
1199 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1203 * Expand the range, only partially overwriting the previous and
1206 for (i = 100; i < 130; i += 3) {
1209 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1213 * Expand the range, only partially overwriting the previous and
1214 * next ranges, in RCU mode
1217 for (i = 150; i < 180; i += 3) {
1220 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1223 MT_BUG_ON(mt, !mt_height(mt));
1225 mt_set_non_kernel(0);
1228 /* Test rebalance gaps */
1229 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1230 mt_set_non_kernel(50);
1231 for (i = 0; i <= 50; i++) {
1234 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1236 check_store_range(mt, 161, 161, xa_mk_value(161), 0);
1237 check_store_range(mt, 162, 162, xa_mk_value(162), 0);
1238 check_store_range(mt, 163, 163, xa_mk_value(163), 0);
1239 check_store_range(mt, 240, 249, NULL, 0);
1240 mtree_erase(mt, 200);
1241 mtree_erase(mt, 210);
1242 mtree_erase(mt, 220);
1243 mtree_erase(mt, 230);
1244 mt_set_non_kernel(0);
1245 MT_BUG_ON(mt, !mt_height(mt));
1248 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1249 for (i = 0; i <= 500; i++) {
1252 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1254 check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
1256 MT_BUG_ON(mt, !mt_height(mt));
1259 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1260 for (i = 0; i <= 500; i++) {
1263 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1265 check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
1266 check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
1267 check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
1268 check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
1269 check_store_range(mt, 4842, 4849, NULL, 0);
1271 MT_BUG_ON(mt, !mt_height(mt));
1274 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1275 for (i = 0; i <= 1300; i++) {
1278 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1279 MT_BUG_ON(mt, mt_height(mt) >= 4);
1281 /* Cause a 3 child split all the way up the tree. */
1282 for (i = 5; i < 215; i += 10)
1283 check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
1284 for (i = 5; i < 65; i += 10)
1285 check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);
1287 MT_BUG_ON(mt, mt_height(mt) >= 4);
1288 for (i = 5; i < 45; i += 10)
1289 check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
/* The extra stores above must finally push the tree to height 4. */
1291 MT_BUG_ON(mt, mt_height(mt) < 4);
1295 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1296 for (i = 0; i <= 1200; i++) {
1299 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1300 MT_BUG_ON(mt, mt_height(mt) >= 4);
1302 /* Fill parents and leaves before split. */
1303 for (i = 5; i < 455; i += 10)
1304 check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);
1306 for (i = 1; i < 16; i++)
1307 check_store_range(mt, 8185 + i, 8185 + i + 1,
1308 xa_mk_value(8185+i), 0);
1309 MT_BUG_ON(mt, mt_height(mt) >= 4);
1310 /* triple split across multiple levels. */
1311 check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
1313 MT_BUG_ON(mt, mt_height(mt) != 4);
/*
 * check_next_entry() - build 0..limit, position with mas_walk(), then step
 * forward with mas_next() verifying each value and the NULL once past the
 * limit.
 * NOTE(review): truncated — declarations, locking and the loop-closing
 * brace are not visible in this chunk.
 */
1316 static noinline void __init check_next_entry(struct maple_tree *mt)
1319 unsigned long limit = 30, i = 0;
1320 MA_STATE(mas, mt, i, i);
1322 MT_BUG_ON(mt, !mtree_empty(mt));
1324 check_seq(mt, limit, false);
1327 /* Check the first one and get ma_state in the correct state. */
1328 MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
1329 for ( ; i <= limit + 1; i++) {
1330 entry = mas_next(&mas, limit);
1332 MT_BUG_ON(mt, entry != NULL);
1334 MT_BUG_ON(mt, xa_mk_value(i) != entry);
/*
 * check_prev_entry() - verify mas_prev() steps back one entry from a known
 * position, then, on a fresh range-based tree, verify mas_prev() returns
 * NULL when the requested minimum lands inside a gap.
 * NOTE(review): truncated — declarations, tree resets and mas_set() calls
 * between phases are not visible in this chunk.
 */
1340 static noinline void __init check_prev_entry(struct maple_tree *mt)
1342 unsigned long index = 16;
1346 MA_STATE(mas, mt, index, index);
1348 MT_BUG_ON(mt, !mtree_empty(mt));
1349 check_seq(mt, 30, false);
1352 value = mas_find(&mas, ULONG_MAX);
1353 MT_BUG_ON(mt, value != xa_mk_value(index));
1354 value = mas_prev(&mas, 0);
1355 MT_BUG_ON(mt, value != xa_mk_value(index - 1));
1359 /* Check limits on prev */
1360 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1362 for (i = 0; i <= index; i++) {
1363 mas_set_range(&mas, i*10, i*10+5);
1364 mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
1368 value = mas_walk(&mas);
1369 MT_BUG_ON(mt, value != xa_mk_value(2));
/* A min inside the preceding gap must stop the walk with NULL. */
1371 value = mas_prev(&mas, 19);
1372 MT_BUG_ON(mt, value != NULL);
1375 value = mas_walk(&mas);
1376 MT_BUG_ON(mt, value != xa_mk_value(8));
1378 value = mas_prev(&mas, 76);
1379 MT_BUG_ON(mt, value != NULL);
/*
 * check_root_expand() - exercise root-node expansion: store single entries
 * (including function pointers and tagged pointer values) at various
 * indices and verify the mas index/last bounds reported by walk/next/prev
 * around each stored entry.
 * NOTE(review): truncated — locking, mas_set() calls, tree resets and
 * several checks between phases are not visible in this chunk.
 */
1384 static noinline void __init check_root_expand(struct maple_tree *mt)
1386 MA_STATE(mas, mt, 0, 0);
1392 ptr = mas_walk(&mas);
1393 MT_BUG_ON(mt, mas.index != 0);
1394 MT_BUG_ON(mt, ptr != NULL);
1395 MT_BUG_ON(mt, mas.index != 0);
/* An empty tree reports a single NULL range spanning everything. */
1396 MT_BUG_ON(mt, mas.last != ULONG_MAX);
1398 ptr = &check_prev_entry;
1400 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1403 ptr = mas_walk(&mas);
1404 MT_BUG_ON(mt, ptr != NULL);
1407 ptr = mas_walk(&mas);
1408 MT_BUG_ON(mt, ptr != &check_prev_entry);
1411 ptr = mas_walk(&mas);
1412 MT_BUG_ON(mt, ptr != NULL);
1417 mt_init_flags(mt, 0);
1421 ptr = &check_prev_entry;
1422 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1425 ptr = mas_walk(&mas);
1426 MT_BUG_ON(mt, ptr != NULL);
1427 MT_BUG_ON(mt, mas.index != 1);
1428 MT_BUG_ON(mt, mas.last != ULONG_MAX);
1430 mas_set_range(&mas, 0, 100);
1431 ptr = mas_walk(&mas);
1432 MT_BUG_ON(mt, ptr != &check_prev_entry);
1433 MT_BUG_ON(mt, mas.last != 0);
1437 mt_init_flags(mt, 0);
/* Tagged pointer (low bit set) must round-trip unchanged. */
1441 ptr = (void *)((unsigned long) check_prev_entry | 1UL);
1442 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1443 ptr = mas_next(&mas, ULONG_MAX);
1444 MT_BUG_ON(mt, ptr != NULL);
1445 MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));
1448 ptr = mas_prev(&mas, 0);
1449 MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
1450 MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));
1456 mt_init_flags(mt, 0);
/* Tagged pointer (bit 1 set) must round-trip unchanged as well. */
1459 ptr = (void *)((unsigned long) check_prev_entry | 2UL);
1460 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1461 ptr = mas_next(&mas, ULONG_MAX);
1462 MT_BUG_ON(mt, ptr != NULL);
1463 MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));
1466 ptr = mas_prev(&mas, 0);
1467 MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
1468 MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));
/*
 * check_gap_combining() - verify that erases/NULL stores combine adjacent
 * gaps correctly and that mas_empty_area_rev() finds the combined gaps,
 * at single-node, multi-level (seq2000) and rebalancing (seq400) scales.
 * The seq100/seq2000/seq400 arrays hold test indices; their initializers
 * are elided from this listing.
 * NOTE(review): braces, some declarations and lock calls are elided here;
 * comments describe only the visible statements.
 */
1474 static noinline void __init check_gap_combining(struct maple_tree *mt)
1476 struct maple_enode *mn1, *mn2;
1478 unsigned long singletons = 100;
1479 static const unsigned long *seq100;
1480 static const unsigned long seq100_64[] = {
1494 static const unsigned long seq100_32[] = {
1508 static const unsigned long seq2000[] = {
1512 static const unsigned long seq400[] = {
1514 256, 260, 266, 270, 275, 280, 290, 398,
1518 unsigned long index;
1520 MA_STATE(mas, mt, 0, 0);
1528 mas_set(&mas, index);
1529 MT_BUG_ON(mt, !mtree_empty(mt));
1530 check_seq(mt, singletons, false); /* create 100 singletons. */
1532 mt_set_non_kernel(1);
1533 mtree_test_erase(mt, seq100[2]);
1534 check_load(mt, seq100[2], NULL);
1535 mtree_test_erase(mt, seq100[1]);
1536 check_load(mt, seq100[1], NULL);
1539 entry = mas_find(&mas, ULONG_MAX);
1540 MT_BUG_ON(mt, entry != xa_mk_value(index));
1542 mas_next(&mas, ULONG_MAX);
1543 entry = mas_next(&mas, ULONG_MAX);
1544 MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
1546 MT_BUG_ON(mt, mn1 == mn2); /* test the test. */
1549 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
1550 * seq100[4]. Search for the gap.
1552 mt_set_non_kernel(1);
1554 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
1556 MT_BUG_ON(mt, mas.index != index + 1);
1559 mtree_test_erase(mt, seq100[6]);
1560 check_load(mt, seq100[6], NULL);
1561 mtree_test_erase(mt, seq100[7]);
1562 check_load(mt, seq100[7], NULL);
1563 mtree_test_erase(mt, seq100[8]);
1570 entry = mas_find(&mas, ULONG_MAX);
1571 MT_BUG_ON(mt, entry != xa_mk_value(index));
1573 entry = mas_next(&mas, ULONG_MAX);
1574 MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
1575 mas_next(&mas, ULONG_MAX); /* go to the next entry. */
1577 MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */
1580 * At this point, there is a gap of 3 at seq100[6]. Find it by
1581 * searching 20 - 50 for size 3.
1584 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
1586 MT_BUG_ON(mt, mas.index != seq100[6]);
/* NULL stores to two adjacent slots must also merge into one gap. */
1589 mt_set_non_kernel(1);
1590 mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
1591 check_load(mt, seq100[13], NULL);
1592 check_load(mt, seq100[14], xa_mk_value(seq100[14]));
1593 mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
1594 check_load(mt, seq100[13], NULL);
1595 check_load(mt, seq100[14], NULL);
1599 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
1601 MT_BUG_ON(mt, mas.index != seq100[13]);
1606 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
1609 mt_set_non_kernel(2);
1610 mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
1611 mtree_test_erase(mt, seq100[15]);
1614 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
1617 MT_BUG_ON(mt, mas.index != seq100[18]);
1621 /* seq 2000 tests are for multi-level tree gaps */
1622 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1623 check_seq(mt, 2000, false);
1624 mt_set_non_kernel(1);
1625 mtree_test_erase(mt, seq2000[0]);
1626 mtree_test_erase(mt, seq2000[1]);
1628 mt_set_non_kernel(2);
1631 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
1633 MT_BUG_ON(mt, mas.index != seq2000[1]);
1638 /* seq 400 tests rebalancing over two levels. */
1639 mt_set_non_kernel(99);
1640 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1641 check_seq(mt, 400, false);
1642 mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
1643 mt_set_non_kernel(0);
1646 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1647 check_seq(mt, 400, false);
1648 mt_set_non_kernel(50);
/* Repeated overlapping range stores force rebalances up to seq400[9]. */
1649 mtree_test_store_range(mt, seq400[2], seq400[9],
1650 xa_mk_value(seq400[2]));
1651 mtree_test_store_range(mt, seq400[3], seq400[9],
1652 xa_mk_value(seq400[3]));
1653 mtree_test_store_range(mt, seq400[4], seq400[9],
1654 xa_mk_value(seq400[4]));
1655 mtree_test_store_range(mt, seq400[5], seq400[9],
1656 xa_mk_value(seq400[5]));
1657 mtree_test_store_range(mt, seq400[0], seq400[9],
1658 xa_mk_value(seq400[0]));
1659 mtree_test_store_range(mt, seq400[6], seq400[9],
1660 xa_mk_value(seq400[6]));
1661 mtree_test_store_range(mt, seq400[7], seq400[9],
1662 xa_mk_value(seq400[7]));
1663 mtree_test_store_range(mt, seq400[8], seq400[9],
1664 xa_mk_value(seq400[8]));
1665 mtree_test_store_range(mt, seq400[10], seq400[11],
1666 xa_mk_value(seq400[10]));
1668 mt_set_non_kernel(0);
/*
 * check_node_overwrite() - populate many disjoint ranges, then overwrite a
 * large span (319951-367950) with NULL to exercise multi-node overwrite.
 * NOTE(review): the declaration of 'i'/'max' is elided from this listing.
 */
1671 static noinline void __init check_node_overwrite(struct maple_tree *mt)
1675 for (i = 0; i < max; i++)
1676 mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));
1678 mtree_test_store_range(mt, 319951, 367950, NULL);
1679 /*mt_dump(mt, mt_dump_dec); */
1683 #if defined(BENCH_SLOT_STORE)
/*
 * bench_slot_store() - benchmark repeated single-slot store/clear cycles
 * at a fixed position in a pre-populated tree (compiled under
 * BENCH_SLOT_STORE only).
 */
1684 static noinline void __init bench_slot_store(struct maple_tree *mt)
1686 int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;
1688 for (i = 0; i < max; i += 10)
1689 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1691 for (i = 0; i < count; i++) {
1692 mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
1693 mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
1699 #if defined(BENCH_NODE_STORE)
/*
 * bench_node_store() - benchmark node-level stores by repeatedly writing a
 * 16-slot range that slides forward, wrapping before 135 (compiled under
 * BENCH_NODE_STORE only).
 */
1700 static noinline void __init bench_node_store(struct maple_tree *mt)
1702 int i, overwrite = 76, max = 240, count = 20000000;
1704 for (i = 0; i < max; i += 10)
1705 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1707 for (i = 0; i < count; i++) {
1708 mtree_store_range(mt, overwrite, overwrite + 15,
1709 xa_mk_value(overwrite), GFP_KERNEL);
/* Reset the sliding window once it reaches the wrap point. */
1712 if (overwrite >= 135)
1718 #if defined(BENCH_AWALK)
/*
 * bench_awalk() - benchmark mas_empty_area_rev() against a known gap at
 * 1470-1475 (compiled under BENCH_AWALK only).
 */
1719 static noinline void __init bench_awalk(struct maple_tree *mt)
1721 int i, max = 2500, count = 50000000;
1722 MA_STATE(mas, mt, 1470, 1470);
1724 for (i = 0; i < max; i += 10)
1725 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
/* Carve out the gap that every iteration below will search for. */
1727 mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);
1729 for (i = 0; i < count; i++) {
1730 mas_empty_area_rev(&mas, 0, 2000, 10);
1735 #if defined(BENCH_WALK)
/*
 * bench_walk() - benchmark plain tree walks on a pre-populated tree
 * (compiled under BENCH_WALK only; the walk call inside the loop is
 * elided from this listing).
 */
1736 static noinline void __init bench_walk(struct maple_tree *mt)
1738 int i, max = 2500, count = 550000000;
1739 MA_STATE(mas, mt, 1470, 1470);
1741 for (i = 0; i < max; i += 10)
1742 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1744 for (i = 0; i < count; i++) {
1752 #if defined(BENCH_MT_FOR_EACH)
/*
 * bench_mt_for_each() - benchmark the mt_for_each() iterator over a dense
 * tree, verifying each visited value (compiled under BENCH_MT_FOR_EACH
 * only).
 */
1753 static noinline void __init bench_mt_for_each(struct maple_tree *mt)
1755 int i, count = 1000000;
1756 unsigned long max = 2500, index = 0;
1759 for (i = 0; i < max; i += 5)
1760 mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);
1762 for (i = 0; i < count; i++) {
1763 unsigned long j = 0;
1765 mt_for_each(mt, entry, index, max) {
1766 MT_BUG_ON(mt, entry != xa_mk_value(j));
1776 #if defined(BENCH_MAS_FOR_EACH)
/*
 * bench_mas_for_each() - benchmark the mas_for_each() iterator, verifying
 * each visited value (compiled under BENCH_MAS_FOR_EACH only; the 'gap'
 * computation is elided from this listing).
 */
1777 static noinline void __init bench_mas_for_each(struct maple_tree *mt)
1779 int i, count = 1000000;
1780 unsigned long max = 2500;
1782 MA_STATE(mas, mt, 0, 0);
1784 for (i = 0; i < max; i += 5) {
1789 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1793 for (i = 0; i < count; i++) {
1794 unsigned long j = 0;
1796 mas_for_each(&mas, entry, max) {
1797 MT_BUG_ON(mt, entry != xa_mk_value(j));
1806 #if defined(BENCH_MAS_PREV)
/*
 * bench_mas_prev() - benchmark reverse iteration with mas_prev() from
 * ULONG_MAX down to 0, verifying each visited value (compiled under
 * BENCH_MAS_PREV only; the 'gap' computation is elided from this listing).
 */
1807 static noinline void __init bench_mas_prev(struct maple_tree *mt)
1809 int i, count = 1000000;
1810 unsigned long max = 2500;
1812 MA_STATE(mas, mt, 0, 0);
1814 for (i = 0; i < max; i += 5) {
1819 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1823 for (i = 0; i < count; i++) {
/* 2495 is the last populated index; walk backwards from the top. */
1824 unsigned long j = 2495;
1826 mas_set(&mas, ULONG_MAX);
1827 while ((entry = mas_prev(&mas, 0)) != NULL) {
1828 MT_BUG_ON(mt, entry != xa_mk_value(j));
/* check_forking - simulate the kernel forking sequence with the tree. */
/*
 * Build a source tree, then copy every entry into a new externally-locked
 * tree via mas_expected_entries()/mas_store(), the same sequence the
 * kernel uses when duplicating an mm at fork.
 * NOTE(review): lock/unlock and loop-closing lines are elided here.
 */
1837 static noinline void __init check_forking(struct maple_tree *mt)
1840 struct maple_tree newmt;
1841 int i, nr_entries = 134;
1843 MA_STATE(mas, mt, 0, 0);
1844 MA_STATE(newmas, mt, 0, 0);
1845 struct rw_semaphore newmt_lock;
1847 init_rwsem(&newmt_lock);
1849 for (i = 0; i <= nr_entries; i++)
1850 mtree_store_range(mt, i*10, i*10 + 5,
1851 xa_mk_value(i), GFP_KERNEL);
1853 mt_set_non_kernel(99999);
1854 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
1855 mt_set_external_lock(&newmt, &newmt_lock);
1856 newmas.tree = &newmt;
1859 down_write(&newmt_lock);
/* Preallocate nodes for the expected entry count before copying. */
1862 if (mas_expected_entries(&newmas, nr_entries)) {
1867 mas_for_each(&mas, val, ULONG_MAX) {
1868 newmas.index = mas.index;
1869 newmas.last = mas.last;
1870 mas_store(&newmas, val);
1873 mas_destroy(&newmas);
1874 mt_validate(&newmt);
1875 mt_set_non_kernel(0);
1876 __mt_destroy(&newmt);
1877 up_write(&newmt_lock);
/*
 * check_iteration() - verify that storing into the current entry while
 * iterating with mas_for_each() does not disturb the iterator: the next
 * mas_find() must still return the following entry.
 * NOTE(review): the mas_set()/val-modifying lines inside each loop are
 * elided from this listing.
 */
1880 static noinline void __init check_iteration(struct maple_tree *mt)
1882 int i, nr_entries = 125;
1884 MA_STATE(mas, mt, 0, 0);
1886 for (i = 0; i <= nr_entries; i++)
1887 mtree_store_range(mt, i * 10, i * 10 + 9,
1888 xa_mk_value(i), GFP_KERNEL);
1890 mt_set_non_kernel(99999);
1894 mas_for_each(&mas, val, 925) {
1895 MT_BUG_ON(mt, mas.index != i * 10);
1896 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1897 /* Overwrite end of entry 92 */
1901 mas_store(&mas, val);
1905 /* Ensure mas_find() gets the next value */
1906 val = mas_find(&mas, ULONG_MAX);
1907 MT_BUG_ON(mt, val != xa_mk_value(i));
1911 mas_for_each(&mas, val, 785) {
1912 MT_BUG_ON(mt, mas.index != i * 10);
1913 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1914 /* Overwrite start of entry 78 */
1918 mas_store(&mas, val);
1923 val = mas_find(&mas, ULONG_MAX);
1924 MT_BUG_ON(mt, val != xa_mk_value(i));
1928 mas_for_each(&mas, val, 765) {
1929 MT_BUG_ON(mt, mas.index != i * 10);
1930 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1931 /* Overwrite end of entry 76 and advance to the end */
1935 mas_store(&mas, val);
1939 /* Make sure the next find returns the one after 765, 766-769 */
1940 val = mas_find(&mas, ULONG_MAX);
1941 MT_BUG_ON(mt, val != xa_mk_value(76));
1944 mt_set_non_kernel(0);
/*
 * check_mas_store_gfp() - duplicate a tree entry-by-entry using
 * mas_store_gfp() (allocating as it goes) rather than the preallocated
 * mas_expected_entries() path used by check_forking().
 * NOTE(review): the mas_lock() matching the visible mas_unlock() is on an
 * elided line.
 */
1947 static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
1950 struct maple_tree newmt;
1951 int i, nr_entries = 135;
1953 MA_STATE(mas, mt, 0, 0);
1954 MA_STATE(newmas, mt, 0, 0);
1956 for (i = 0; i <= nr_entries; i++)
1957 mtree_store_range(mt, i*10, i*10 + 5,
1958 xa_mk_value(i), GFP_KERNEL);
1960 mt_set_non_kernel(99999);
1961 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE)
1962 newmas.tree = &newmt;
1967 mas_for_each(&mas, val, ULONG_MAX) {
1968 newmas.index = mas.index;
1969 newmas.last = mas.last;
1970 mas_store_gfp(&newmas, val, GFP_KERNEL);
1972 mas_unlock(&newmas);
1974 mt_validate(&newmt);
1975 mt_set_non_kernel(0);
1976 mtree_destroy(&newmt);
1979 #if defined(BENCH_FORK)
/*
 * bench_forking() - repeat the check_forking() duplication sequence
 * nr_fork times to measure fork-style tree copy cost (compiled under
 * BENCH_FORK only).
 */
1980 static noinline void __init bench_forking(struct maple_tree *mt)
1983 struct maple_tree newmt;
1984 int i, nr_entries = 134, nr_fork = 80000;
1986 MA_STATE(mas, mt, 0, 0);
1987 MA_STATE(newmas, mt, 0, 0);
1988 struct rw_semaphore newmt_lock;
1990 init_rwsem(&newmt_lock);
1991 mt_set_external_lock(&newmt, &newmt_lock);
1993 for (i = 0; i <= nr_entries; i++)
1994 mtree_store_range(mt, i*10, i*10 + 5,
1995 xa_mk_value(i), GFP_KERNEL);
1997 for (i = 0; i < nr_fork; i++) {
1998 mt_set_non_kernel(99999);
1999 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
2000 newmas.tree = &newmt;
2006 down_write(&newmt_lock);
/* Preallocate for the copy, exactly as check_forking() does. */
2007 if (mas_expected_entries(&newmas, nr_entries)) {
2011 mas_for_each(&mas, val, ULONG_MAX) {
2012 newmas.index = mas.index;
2013 newmas.last = mas.last;
2014 mas_store(&newmas, val);
2016 mas_destroy(&newmas);
2018 mt_validate(&newmt);
2019 mt_set_non_kernel(0);
2020 __mt_destroy(&newmt);
2021 up_write(&newmt_lock);
/*
 * next_prev_test() - verify mas_next()/mas_prev() semantics: within a
 * node, across node boundaries, across two tree levels (level2 indices),
 * off the end and back, and off the start into MAS_UNDERFLOW.
 * NOTE(review): mas_set() repositioning calls between many assertion
 * groups are elided from this listing, as are braces and some decls.
 */
2026 static noinline void __init next_prev_test(struct maple_tree *mt)
2030 MA_STATE(mas, mt, 0, 0);
2031 struct maple_enode *mn;
2032 static const unsigned long *level2;
2033 static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
2035 static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
2037 unsigned long last_index;
2042 last_index = 0x138e;
2049 for (i = 0; i <= nr_entries; i++)
2050 mtree_store_range(mt, i*10, i*10 + 5,
2051 xa_mk_value(i), GFP_KERNEL);
2054 for (i = 0; i <= nr_entries / 2; i++) {
2055 mas_next(&mas, 1000);
2056 if (mas_is_none(&mas))
2063 mas_for_each(&mas, val, 1000) {
2070 mas_for_each(&mas, val, 1000) {
2076 * 680 - 685 = 0x61a00001930c
2078 * 690 - 695 = 0x61a00001930c
2079 * Check simple next/prev
2082 val = mas_walk(&mas);
2083 MT_BUG_ON(mt, val != NULL);
2085 val = mas_next(&mas, 1000);
2086 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2087 MT_BUG_ON(mt, mas.index != 690);
2088 MT_BUG_ON(mt, mas.last != 695);
2090 val = mas_prev(&mas, 0);
2091 MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
2092 MT_BUG_ON(mt, mas.index != 680);
2093 MT_BUG_ON(mt, mas.last != 685);
2095 val = mas_next(&mas, 1000);
2096 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2097 MT_BUG_ON(mt, mas.index != 690);
2098 MT_BUG_ON(mt, mas.last != 695);
2100 val = mas_next(&mas, 1000);
2101 MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
2102 MT_BUG_ON(mt, mas.index != 700);
2103 MT_BUG_ON(mt, mas.last != 705);
2105 /* Check across node boundaries of the tree */
2107 val = mas_walk(&mas);
2108 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2109 MT_BUG_ON(mt, mas.index != 70);
2110 MT_BUG_ON(mt, mas.last != 75);
2112 val = mas_next(&mas, 1000);
2113 MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
2114 MT_BUG_ON(mt, mas.index != 80);
2115 MT_BUG_ON(mt, mas.last != 85);
2117 val = mas_prev(&mas, 70);
2118 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2119 MT_BUG_ON(mt, mas.index != 70);
2120 MT_BUG_ON(mt, mas.last != 75);
2122 /* Check across two levels of the tree */
2124 mas_set(&mas, level2[0]);
2125 val = mas_walk(&mas);
2126 MT_BUG_ON(mt, val != NULL);
2127 val = mas_next(&mas, level2[1]);
2128 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2129 MT_BUG_ON(mt, mas.index != level2[2]);
2130 MT_BUG_ON(mt, mas.last != level2[3]);
2133 val = mas_next(&mas, level2[1]);
2134 MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
2135 MT_BUG_ON(mt, mas.index != level2[4]);
2136 MT_BUG_ON(mt, mas.last != level2[5]);
/* Crossing a level boundary must have moved to a different node. */
2137 MT_BUG_ON(mt, mn == mas.node);
2139 val = mas_prev(&mas, 0);
2140 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2141 MT_BUG_ON(mt, mas.index != level2[2]);
2142 MT_BUG_ON(mt, mas.last != level2[3]);
2144 /* Check running off the end and back on */
2145 mas_set(&mas, nr_entries * 10);
2146 val = mas_walk(&mas);
2147 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2148 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2149 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2151 val = mas_next(&mas, ULONG_MAX);
2152 MT_BUG_ON(mt, val != NULL);
2153 MT_BUG_ON(mt, mas.index != last_index);
2154 MT_BUG_ON(mt, mas.last != ULONG_MAX);
2156 val = mas_prev(&mas, 0);
2157 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2158 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2159 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2161 /* Check running off the start and back on */
2164 val = mas_walk(&mas);
2165 MT_BUG_ON(mt, val != xa_mk_value(1));
2166 MT_BUG_ON(mt, mas.index != 10);
2167 MT_BUG_ON(mt, mas.last != 15);
2169 val = mas_prev(&mas, 0);
2170 MT_BUG_ON(mt, val != xa_mk_value(0));
2171 MT_BUG_ON(mt, mas.index != 0);
2172 MT_BUG_ON(mt, mas.last != 5);
/* A second prev past index 0 returns NULL and parks in MAS_UNDERFLOW. */
2174 val = mas_prev(&mas, 0);
2175 MT_BUG_ON(mt, val != NULL);
2176 MT_BUG_ON(mt, mas.index != 0);
2177 MT_BUG_ON(mt, mas.last != 5);
2178 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
2182 mas_store(&mas, NULL);
2187 val = mas_prev(&mas, 0);
2188 MT_BUG_ON(mt, val != NULL);
2189 MT_BUG_ON(mt, mas.index != 0);
2190 MT_BUG_ON(mt, mas.last != 9);
2196 mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
2197 mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
2200 val = mas_prev(&mas, 4);
2201 MT_BUG_ON(mt, val != NULL);
/* Test spanning writes that require balancing right sibling or right cousin */
/*
 * Populate ~1000 disjoint ranges, then store NULL over 9365-9955, a span
 * crossing many nodes so the rebalance pulls from relatives to the right.
 */
2208 static noinline void __init check_spanning_relatives(struct maple_tree *mt)
2211 unsigned long i, nr_entries = 1000;
2213 for (i = 0; i <= nr_entries; i++)
2214 mtree_store_range(mt, i*10, i*10 + 5,
2215 xa_mk_value(i), GFP_KERNEL);
2218 mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
/*
 * check_fuzzer() - regression tests replayed from fuzzer crash reports.
 * Each numbered section reproduces, operation-for-operation, a sequence
 * that once triggered a bug; the header comment of each section names the
 * bug and the fix. Do NOT reorder or "simplify" these sequences - the
 * exact order is what reproduces each crash.
 * NOTE(review): mtree_destroy()/braces between sections are elided in
 * this listing.
 */
2221 static noinline void __init check_fuzzer(struct maple_tree *mt)
2224 * 1. Causes a spanning rebalance of a single root node.
2225 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
2226 * entire right side is consumed.
2228 mtree_test_insert(mt, 88, (void *)0xb1);
2229 mtree_test_insert(mt, 84, (void *)0xa9);
2230 mtree_test_insert(mt, 2, (void *)0x5);
2231 mtree_test_insert(mt, 4, (void *)0x9);
2232 mtree_test_insert(mt, 14, (void *)0x1d);
2233 mtree_test_insert(mt, 7, (void *)0xf);
2234 mtree_test_insert(mt, 12, (void *)0x19);
2235 mtree_test_insert(mt, 18, (void *)0x25);
2236 mtree_test_store_range(mt, 8, 18, (void *)0x11);
2241 * 2. Cause a spanning rebalance of two nodes in root.
2242 * Fixed by setting mast->r->max correctly.
2244 mt_init_flags(mt, 0);
2245 mtree_test_store(mt, 87, (void *)0xaf);
2246 mtree_test_store(mt, 0, (void *)0x1);
2247 mtree_test_load(mt, 4);
2248 mtree_test_insert(mt, 4, (void *)0x9);
2249 mtree_test_store(mt, 8, (void *)0x11);
2250 mtree_test_store(mt, 44, (void *)0x59);
2251 mtree_test_store(mt, 68, (void *)0x89);
2252 mtree_test_store(mt, 2, (void *)0x5);
2253 mtree_test_insert(mt, 43, (void *)0x57);
2254 mtree_test_insert(mt, 24, (void *)0x31);
2255 mtree_test_insert(mt, 844, (void *)0x699);
2256 mtree_test_store(mt, 84, (void *)0xa9);
2257 mtree_test_store(mt, 4, (void *)0x9);
2258 mtree_test_erase(mt, 4);
2259 mtree_test_load(mt, 5);
2260 mtree_test_erase(mt, 0);
2264 * 3. Cause a node overflow on copy
2265 * Fixed by using the correct check for node size in mas_wr_modify()
2266 * Also discovered issue with metadata setting.
2268 mt_init_flags(mt, 0);
2269 mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
2270 mtree_test_store(mt, 4, (void *)0x9);
2271 mtree_test_erase(mt, 5);
2272 mtree_test_erase(mt, 0);
2273 mtree_test_erase(mt, 4);
2274 mtree_test_store(mt, 5, (void *)0xb);
2275 mtree_test_erase(mt, 5);
2276 mtree_test_store(mt, 5, (void *)0xb);
2277 mtree_test_erase(mt, 5);
2278 mtree_test_erase(mt, 4);
2279 mtree_test_store(mt, 4, (void *)0x9);
2280 mtree_test_store(mt, 444, (void *)0x379);
2281 mtree_test_store(mt, 0, (void *)0x1);
2282 mtree_test_load(mt, 0);
2283 mtree_test_store(mt, 5, (void *)0xb);
2284 mtree_test_erase(mt, 0);
2288 * 4. spanning store failure due to writing incorrect pivot value at
2290 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes()
2293 mt_init_flags(mt, 0);
2294 mtree_test_insert(mt, 261, (void *)0x20b);
2295 mtree_test_store(mt, 516, (void *)0x409);
2296 mtree_test_store(mt, 6, (void *)0xd);
2297 mtree_test_insert(mt, 5, (void *)0xb);
2298 mtree_test_insert(mt, 1256, (void *)0x9d1);
2299 mtree_test_store(mt, 4, (void *)0x9);
2300 mtree_test_erase(mt, 1);
2301 mtree_test_store(mt, 56, (void *)0x71);
2302 mtree_test_insert(mt, 1, (void *)0x3);
2303 mtree_test_store(mt, 24, (void *)0x31);
2304 mtree_test_erase(mt, 1);
2305 mtree_test_insert(mt, 2263, (void *)0x11af);
2306 mtree_test_insert(mt, 446, (void *)0x37d);
2307 mtree_test_store_range(mt, 6, 45, (void *)0xd);
2308 mtree_test_store_range(mt, 3, 446, (void *)0x7);
2312 * 5. mas_wr_extend_null() may overflow slots.
2313 * Fix by checking against wr_mas->node_end.
2315 mt_init_flags(mt, 0);
2316 mtree_test_store(mt, 48, (void *)0x61);
2317 mtree_test_store(mt, 3, (void *)0x7);
2318 mtree_test_load(mt, 0);
2319 mtree_test_store(mt, 88, (void *)0xb1);
2320 mtree_test_store(mt, 81, (void *)0xa3);
2321 mtree_test_insert(mt, 0, (void *)0x1);
2322 mtree_test_insert(mt, 8, (void *)0x11);
2323 mtree_test_insert(mt, 4, (void *)0x9);
2324 mtree_test_insert(mt, 2480, (void *)0x1361);
2325 mtree_test_insert(mt, ULONG_MAX,
2326 (void *)0xffffffffffffffff);
2327 mtree_test_erase(mt, ULONG_MAX);
2331 * 6. When reusing a node with an implied pivot and the node is
2332 * shrinking, old data would be left in the implied slot
2333 * Fixed by checking the last pivot for the mas->max and clear
2334 * accordingly. This only affected the left-most node as that node is
2335 * the only one allowed to end in NULL.
2337 mt_init_flags(mt, 0);
2338 mtree_test_erase(mt, 3);
2339 mtree_test_insert(mt, 22, (void *)0x2d);
2340 mtree_test_insert(mt, 15, (void *)0x1f);
2341 mtree_test_load(mt, 2);
2342 mtree_test_insert(mt, 1, (void *)0x3);
2343 mtree_test_insert(mt, 1, (void *)0x3);
2344 mtree_test_insert(mt, 5, (void *)0xb);
2345 mtree_test_erase(mt, 1);
2346 mtree_test_insert(mt, 1, (void *)0x3);
2347 mtree_test_insert(mt, 4, (void *)0x9);
2348 mtree_test_insert(mt, 1, (void *)0x3);
2349 mtree_test_erase(mt, 1);
2350 mtree_test_insert(mt, 2, (void *)0x5);
2351 mtree_test_insert(mt, 1, (void *)0x3);
2352 mtree_test_erase(mt, 3);
2353 mtree_test_insert(mt, 22, (void *)0x2d);
2354 mtree_test_insert(mt, 15, (void *)0x1f);
2355 mtree_test_insert(mt, 2, (void *)0x5);
2356 mtree_test_insert(mt, 1, (void *)0x3);
2357 mtree_test_insert(mt, 8, (void *)0x11);
2358 mtree_test_load(mt, 2);
2359 mtree_test_insert(mt, 1, (void *)0x3);
2360 mtree_test_insert(mt, 1, (void *)0x3);
2361 mtree_test_store(mt, 1, (void *)0x3);
2362 mtree_test_insert(mt, 5, (void *)0xb);
2363 mtree_test_erase(mt, 1);
2364 mtree_test_insert(mt, 1, (void *)0x3);
2365 mtree_test_insert(mt, 4, (void *)0x9);
2366 mtree_test_insert(mt, 1, (void *)0x3);
2367 mtree_test_erase(mt, 1);
2368 mtree_test_insert(mt, 2, (void *)0x5);
2369 mtree_test_insert(mt, 1, (void *)0x3);
2370 mtree_test_erase(mt, 3);
2371 mtree_test_insert(mt, 22, (void *)0x2d);
2372 mtree_test_insert(mt, 15, (void *)0x1f);
2373 mtree_test_insert(mt, 2, (void *)0x5);
2374 mtree_test_insert(mt, 1, (void *)0x3);
2375 mtree_test_insert(mt, 8, (void *)0x11);
2376 mtree_test_insert(mt, 12, (void *)0x19);
2377 mtree_test_erase(mt, 1);
2378 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2379 mtree_test_erase(mt, 62);
2380 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2381 mtree_test_insert(mt, 11, (void *)0x17);
2382 mtree_test_insert(mt, 3, (void *)0x7);
2383 mtree_test_insert(mt, 3, (void *)0x7);
2384 mtree_test_store(mt, 62, (void *)0x7d);
2385 mtree_test_erase(mt, 62);
2386 mtree_test_store_range(mt, 1, 15, (void *)0x3);
2387 mtree_test_erase(mt, 1);
2388 mtree_test_insert(mt, 22, (void *)0x2d);
2389 mtree_test_insert(mt, 12, (void *)0x19);
2390 mtree_test_erase(mt, 1);
2391 mtree_test_insert(mt, 3, (void *)0x7);
2392 mtree_test_store(mt, 62, (void *)0x7d);
2393 mtree_test_erase(mt, 62);
2394 mtree_test_insert(mt, 122, (void *)0xf5);
2395 mtree_test_store(mt, 3, (void *)0x7);
2396 mtree_test_insert(mt, 0, (void *)0x1);
2397 mtree_test_store_range(mt, 0, 1, (void *)0x1);
2398 mtree_test_insert(mt, 85, (void *)0xab);
2399 mtree_test_insert(mt, 72, (void *)0x91);
2400 mtree_test_insert(mt, 81, (void *)0xa3);
2401 mtree_test_insert(mt, 726, (void *)0x5ad);
2402 mtree_test_insert(mt, 0, (void *)0x1);
2403 mtree_test_insert(mt, 1, (void *)0x3);
2404 mtree_test_store(mt, 51, (void *)0x67);
2405 mtree_test_insert(mt, 611, (void *)0x4c7);
2406 mtree_test_insert(mt, 485, (void *)0x3cb);
2407 mtree_test_insert(mt, 1, (void *)0x3);
2408 mtree_test_erase(mt, 1);
2409 mtree_test_insert(mt, 0, (void *)0x1);
2410 mtree_test_insert(mt, 1, (void *)0x3);
2411 mtree_test_insert_range(mt, 26, 1, (void *)0x35);
2412 mtree_test_load(mt, 1);
2413 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2414 mtree_test_insert(mt, 1, (void *)0x3);
2415 mtree_test_erase(mt, 1);
2416 mtree_test_load(mt, 53);
2417 mtree_test_load(mt, 1);
2418 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2419 mtree_test_insert(mt, 222, (void *)0x1bd);
2420 mtree_test_insert(mt, 485, (void *)0x3cb);
2421 mtree_test_insert(mt, 1, (void *)0x3);
2422 mtree_test_erase(mt, 1);
2423 mtree_test_load(mt, 0);
2424 mtree_test_insert(mt, 21, (void *)0x2b);
2425 mtree_test_insert(mt, 3, (void *)0x7);
2426 mtree_test_store(mt, 621, (void *)0x4db);
2427 mtree_test_insert(mt, 0, (void *)0x1);
2428 mtree_test_erase(mt, 5);
2429 mtree_test_insert(mt, 1, (void *)0x3);
2430 mtree_test_store(mt, 62, (void *)0x7d);
2431 mtree_test_erase(mt, 62);
2432 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2433 mtree_test_insert(mt, 22, (void *)0x2d);
2434 mtree_test_insert(mt, 12, (void *)0x19);
2435 mtree_test_erase(mt, 1);
2436 mtree_test_insert(mt, 1, (void *)0x3);
2437 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2438 mtree_test_erase(mt, 62);
2439 mtree_test_erase(mt, 1);
2440 mtree_test_load(mt, 1);
2441 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2442 mtree_test_insert(mt, 1, (void *)0x3);
2443 mtree_test_erase(mt, 1);
2444 mtree_test_load(mt, 53);
2445 mtree_test_load(mt, 1);
2446 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2447 mtree_test_insert(mt, 222, (void *)0x1bd);
2448 mtree_test_insert(mt, 485, (void *)0x3cb);
2449 mtree_test_insert(mt, 1, (void *)0x3);
2450 mtree_test_erase(mt, 1);
2451 mtree_test_insert(mt, 1, (void *)0x3);
2452 mtree_test_load(mt, 0);
2453 mtree_test_load(mt, 0);
2457 * 7. Previous fix was incomplete, fix mas_resuse_node() clearing of old
2458 * data by overwriting it first - that way metadata is of no concern.
2460 mt_init_flags(mt, 0);
2461 mtree_test_load(mt, 1);
2462 mtree_test_insert(mt, 102, (void *)0xcd);
2463 mtree_test_erase(mt, 2);
2464 mtree_test_erase(mt, 0);
2465 mtree_test_load(mt, 0);
2466 mtree_test_insert(mt, 4, (void *)0x9);
2467 mtree_test_insert(mt, 2, (void *)0x5);
2468 mtree_test_insert(mt, 110, (void *)0xdd);
2469 mtree_test_insert(mt, 1, (void *)0x3);
2470 mtree_test_insert_range(mt, 5, 0, (void *)0xb);
2471 mtree_test_erase(mt, 2);
2472 mtree_test_store(mt, 0, (void *)0x1);
2473 mtree_test_store(mt, 112, (void *)0xe1);
2474 mtree_test_insert(mt, 21, (void *)0x2b);
2475 mtree_test_store(mt, 1, (void *)0x3);
2476 mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
2477 mtree_test_store(mt, 2, (void *)0x5);
2478 mtree_test_load(mt, 22);
2479 mtree_test_erase(mt, 2);
2480 mtree_test_store(mt, 210, (void *)0x1a5);
2481 mtree_test_store_range(mt, 0, 2, (void *)0x1);
2482 mtree_test_store(mt, 2, (void *)0x5);
2483 mtree_test_erase(mt, 2);
2484 mtree_test_erase(mt, 22);
2485 mtree_test_erase(mt, 1);
2486 mtree_test_erase(mt, 2);
2487 mtree_test_store(mt, 0, (void *)0x1);
2488 mtree_test_load(mt, 112);
2489 mtree_test_insert(mt, 2, (void *)0x5);
2490 mtree_test_erase(mt, 2);
2491 mtree_test_store(mt, 1, (void *)0x3);
2492 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2493 mtree_test_erase(mt, 0);
2494 mtree_test_erase(mt, 2);
2495 mtree_test_store(mt, 2, (void *)0x5);
2496 mtree_test_erase(mt, 0);
2497 mtree_test_erase(mt, 2);
2498 mtree_test_store(mt, 0, (void *)0x1);
2499 mtree_test_store(mt, 0, (void *)0x1);
2500 mtree_test_erase(mt, 2);
2501 mtree_test_store(mt, 2, (void *)0x5);
2502 mtree_test_erase(mt, 2);
2503 mtree_test_insert(mt, 2, (void *)0x5);
2504 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2505 mtree_test_erase(mt, 0);
2506 mtree_test_erase(mt, 2);
2507 mtree_test_store(mt, 0, (void *)0x1);
2508 mtree_test_load(mt, 112);
2509 mtree_test_store_range(mt, 110, 12, (void *)0xdd);
2510 mtree_test_store(mt, 2, (void *)0x5);
2511 mtree_test_load(mt, 110);
2512 mtree_test_insert_range(mt, 4, 71, (void *)0x9);
2513 mtree_test_load(mt, 2);
2514 mtree_test_store(mt, 2, (void *)0x5);
2515 mtree_test_insert_range(mt, 11, 22, (void *)0x17);
2516 mtree_test_erase(mt, 12);
2517 mtree_test_store(mt, 2, (void *)0x5);
2518 mtree_test_load(mt, 22);
2523 * 8. When rebalancing or spanning_rebalance(), the max of the new node
2524 * may be set incorrectly to the final pivot and not the right max.
2525 * Fix by setting the left max to orig right max if the entire node is
2528 mt_init_flags(mt, 0);
2529 mtree_test_store(mt, 6, (void *)0xd);
2530 mtree_test_store(mt, 67, (void *)0x87);
2531 mtree_test_insert(mt, 15, (void *)0x1f);
2532 mtree_test_insert(mt, 6716, (void *)0x3479);
2533 mtree_test_store(mt, 61, (void *)0x7b);
2534 mtree_test_insert(mt, 13, (void *)0x1b);
2535 mtree_test_store(mt, 8, (void *)0x11);
2536 mtree_test_insert(mt, 1, (void *)0x3);
2537 mtree_test_load(mt, 0);
2538 mtree_test_erase(mt, 67167);
2539 mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
2540 mtree_test_insert(mt, 6, (void *)0xd);
2541 mtree_test_erase(mt, 67);
2542 mtree_test_insert(mt, 1, (void *)0x3);
2543 mtree_test_erase(mt, 667167);
2544 mtree_test_insert(mt, 6, (void *)0xd);
2545 mtree_test_store(mt, 67, (void *)0x87);
2546 mtree_test_insert(mt, 5, (void *)0xb);
2547 mtree_test_erase(mt, 1);
2548 mtree_test_insert(mt, 6, (void *)0xd);
2549 mtree_test_erase(mt, 67);
2550 mtree_test_insert(mt, 15, (void *)0x1f);
2551 mtree_test_insert(mt, 67167, (void *)0x20cbf);
2552 mtree_test_insert(mt, 1, (void *)0x3);
2553 mtree_test_load(mt, 7);
2554 mtree_test_insert(mt, 16, (void *)0x21);
2555 mtree_test_insert(mt, 36, (void *)0x49);
2556 mtree_test_store(mt, 67, (void *)0x87);
2557 mtree_test_store(mt, 6, (void *)0xd);
2558 mtree_test_insert(mt, 367, (void *)0x2df);
2559 mtree_test_insert(mt, 115, (void *)0xe7);
2560 mtree_test_store(mt, 0, (void *)0x1);
2561 mtree_test_store_range(mt, 1, 3, (void *)0x3);
2562 mtree_test_store(mt, 1, (void *)0x3);
2563 mtree_test_erase(mt, 67167);
2564 mtree_test_insert_range(mt, 6, 47, (void *)0xd);
2565 mtree_test_store(mt, 1, (void *)0x3);
2566 mtree_test_insert_range(mt, 1, 67, (void *)0x3);
2567 mtree_test_load(mt, 67);
2568 mtree_test_insert(mt, 1, (void *)0x3);
2569 mtree_test_erase(mt, 67167);
2573 * 9. spanning store to the end of data caused an invalid metadata
2574 * length which resulted in a crash eventually.
2575 * Fix by checking if there is a value in pivot before incrementing the
2576 * metadata end in mab_mas_cp(). To ensure this doesn't happen again,
2577 * abstract the two locations this happens into a function called
2578 * mas_leaf_set_meta().
2580 mt_init_flags(mt, 0);
2581 mtree_test_insert(mt, 21, (void *)0x2b);
2582 mtree_test_insert(mt, 12, (void *)0x19);
2583 mtree_test_insert(mt, 6, (void *)0xd);
2584 mtree_test_insert(mt, 8, (void *)0x11);
2585 mtree_test_insert(mt, 2, (void *)0x5);
2586 mtree_test_insert(mt, 91, (void *)0xb7);
2587 mtree_test_insert(mt, 18, (void *)0x25);
2588 mtree_test_insert(mt, 81, (void *)0xa3);
2589 mtree_test_store_range(mt, 0, 128, (void *)0x1);
2590 mtree_test_store(mt, 1, (void *)0x3);
2591 mtree_test_erase(mt, 8);
2592 mtree_test_insert(mt, 11, (void *)0x17);
2593 mtree_test_insert(mt, 8, (void *)0x11);
2594 mtree_test_insert(mt, 21, (void *)0x2b);
2595 mtree_test_insert(mt, 2, (void *)0x5);
2596 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2597 mtree_test_erase(mt, ULONG_MAX - 10);
2598 mtree_test_store_range(mt, 0, 281, (void *)0x1);
2599 mtree_test_erase(mt, 2);
2600 mtree_test_insert(mt, 1211, (void *)0x977);
2601 mtree_test_insert(mt, 111, (void *)0xdf);
2602 mtree_test_insert(mt, 13, (void *)0x1b);
2603 mtree_test_insert(mt, 211, (void *)0x1a7);
2604 mtree_test_insert(mt, 11, (void *)0x17);
2605 mtree_test_insert(mt, 5, (void *)0xb);
2606 mtree_test_insert(mt, 1218, (void *)0x985);
2607 mtree_test_insert(mt, 61, (void *)0x7b);
2608 mtree_test_store(mt, 1, (void *)0x3);
2609 mtree_test_insert(mt, 121, (void *)0xf3);
2610 mtree_test_insert(mt, 8, (void *)0x11);
2611 mtree_test_insert(mt, 21, (void *)0x2b);
2612 mtree_test_insert(mt, 2, (void *)0x5);
2613 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2614 mtree_test_erase(mt, ULONG_MAX - 10);
/* duplicate the tree with a specific gap */
/*
 * Helper for check_dup(): fill @mt with nr_entries ranges of width
 * (10 - gap), then duplicate it into a fresh externally-locked tree via
 * mas_expected_entries()/mas_store(), verifying preallocation succeeds.
 * NOTE(review): the zero_start handling, lock calls and loop braces are
 * elided from this listing.
 */
2618 static noinline void __init check_dup_gaps(struct maple_tree *mt,
2619 unsigned long nr_entries, bool zero_start,
2622 unsigned long i = 0;
2623 struct maple_tree newmt;
2626 MA_STATE(mas, mt, 0, 0);
2627 MA_STATE(newmas, &newmt, 0, 0);
2628 struct rw_semaphore newmt_lock;
2630 init_rwsem(&newmt_lock);
2631 mt_set_external_lock(&newmt, &newmt_lock);
2636 mt_zero_nr_tallocated();
2637 for (; i <= nr_entries; i++)
2638 mtree_store_range(mt, i*10, (i+1)*10 - gap,
2639 xa_mk_value(i), GFP_KERNEL);
2641 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2642 mt_set_non_kernel(99999);
2643 down_write(&newmt_lock);
/* Preallocation for the known entry count must not fail here. */
2644 ret = mas_expected_entries(&newmas, nr_entries);
2645 mt_set_non_kernel(0);
2646 MT_BUG_ON(mt, ret != 0);
2649 mas_for_each(&mas, tmp, ULONG_MAX) {
2650 newmas.index = mas.index;
2651 newmas.last = mas.last;
2652 mas_store(&newmas, tmp);
2655 mas_destroy(&newmas);
2657 __mt_destroy(&newmt);
2658 up_write(&newmt_lock);
2661 /* Duplicate many sizes of trees. Mainly to test expected entry values */
/*
 * Each loop below re-initializes @mt with the given flags and duplicates it
 * via check_dup_gaps() with (entry count, zero_start, gap) combinations:
 * allocation trees and plain trees, starting at zero or not, with and
 * without gaps, and at "unreasonably large" entry counts around big_start.
 */
2662 static noinline void __init check_dup(struct maple_tree *mt)
2665 int big_start = 100010;
2667 /* Check with a value at zero */
2668 for (i = 10; i < 1000; i++) {
2669 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2670 check_dup_gaps(mt, i, true, 5);
2677 /* Check with a value at zero, no gap */
2678 for (i = 1000; i < 2000; i++) {
2679 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2680 check_dup_gaps(mt, i, true, 0);
2687 /* Check with a value at zero and unreasonably large */
2688 for (i = big_start; i < big_start + 10; i++) {
2689 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2690 check_dup_gaps(mt, i, true, 5);
2697 /* Small to medium size not starting at zero*/
2698 for (i = 200; i < 1000; i++) {
2699 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2700 check_dup_gaps(mt, i, false, 5);
2707 /* Unreasonably large not starting at zero*/
2708 for (i = big_start; i < big_start + 10; i++) {
2709 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2710 check_dup_gaps(mt, i, false, 5);
2717 /* Check non-allocation tree not starting at zero */
2718 for (i = 1500; i < 3000; i++) {
2719 mt_init_flags(mt, 0);
2720 check_dup_gaps(mt, i, false, 5);
2729 /* Check non-allocation tree starting at zero */
2730 for (i = 200; i < 1000; i++) {
2731 mt_init_flags(mt, 0);
2732 check_dup_gaps(mt, i, true, 5);
2739 /* Unreasonably large */
2740 for (i = big_start + 5; i < big_start + 10; i++) {
2741 mt_init_flags(mt, 0);
2742 check_dup_gaps(mt, i, true, 5);
/*
 * check_bnode_min_spanning() - store ranges then overwrite 240-509 with NULL
 * to force a spanning store across node boundaries at minimum occupancy.
 * The function's own address is used as an arbitrary non-NULL entry value.
 *
 * NOTE(review): the loop header populating the ranges (the declaration of i
 * and its bounds) is not visible in this excerpt — confirm in full source.
 */
2750 static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
2753 MA_STATE(mas, mt, 0, 0);
2755 mt_set_non_kernel(9999);
2758 mas_set_range(&mas, i*10, i*10+9);
2759 mas_store(&mas, check_bnode_min_spanning);
/* Overwrite a wide span with NULL to trigger the spanning store path. */
2762 mas_set_range(&mas, 240, 509);
2763 mas_store(&mas, NULL);
2766 mt_set_non_kernel(0);
/*
 * check_empty_area_window() - exercise mas_empty_area() and
 * mas_empty_area_rev() against a tree with entries at 10-209 (step 10)
 * plus a deliberately punched hole at 160-169, checking both the
 * success cases (index/last of the found gap) and the -EBUSY/-EINVAL
 * failure cases at the window boundaries.
 */
2769 static noinline void __init check_empty_area_window(struct maple_tree *mt)
2771 unsigned long i, nr_entries = 20;
2772 MA_STATE(mas, mt, 0, 0);
2774 for (i = 1; i <= nr_entries; i++)
2775 mtree_store_range(mt, i*10, i*10 + 9,
2776 xa_mk_value(i), GFP_KERNEL);
2778 /* Create another hole besides the one at 0 */
2779 mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);
2781 /* Check lower bounds that don't fit */
2783 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);
2786 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);
2788 /* Check lower bound that does fit */
2790 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
2791 MT_BUG_ON(mt, mas.index != 5);
2792 MT_BUG_ON(mt, mas.last != 9);
2795 /* Check one gap that doesn't fit and one that does */
2798 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
2799 MT_BUG_ON(mt, mas.index != 161);
2800 MT_BUG_ON(mt, mas.last != 169);
2802 /* Check one gap that does fit above the min */
2804 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
2805 MT_BUG_ON(mt, mas.index != 216);
2806 MT_BUG_ON(mt, mas.last != 218);
2808 /* Check size that doesn't fit any gap */
2810 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);
2813 * Check size that doesn't fit the lower end of the window but
2817 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);
2820 * Check size that doesn't fit the upper end of the window but
2824 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);
2826 /* Check mas_empty_area forward */
2828 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
2829 MT_BUG_ON(mt, mas.index != 0);
2830 MT_BUG_ON(mt, mas.last != 8);
2833 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
2834 MT_BUG_ON(mt, mas.index != 0);
2835 MT_BUG_ON(mt, mas.last != 3);
2838 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);
2841 MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);
/* max < size is an invalid request, not merely a busy window. */
2844 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);
2847 mas_empty_area(&mas, 100, 165, 3);
2850 MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
/*
 * check_empty_area_fill() - repeatedly find and fill empty areas of
 * decreasing power-of-two sizes (shift 12..16) until the range [0, max)
 * is exhausted, then verify mas_empty_area() reports -EBUSY.  Afterwards,
 * densely fill a deep subtree and punch two single-entry holes to check
 * that a search starting just past one hole still finds a large gap.
 */
2854 static noinline void __init check_empty_area_fill(struct maple_tree *mt)
2856 const unsigned long max = 0x25D78000;
2859 MA_STATE(mas, mt, 0, 0);
2861 mt_set_non_kernel(99999);
2862 for (shift = 12; shift <= 16; shift++) {
2868 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
2869 MT_BUG_ON(mt, mas.last != mas.index + size - 1);
2870 mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
2876 /* No space left. */
2879 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
2882 /* Fill a depth 3 node to the maximum */
2883 for (unsigned long i = 629440511; i <= 629440800; i += 6)
2884 mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
2885 /* Make space in the second-last depth 4 node */
2886 mtree_erase(mt, 631668735);
2887 /* Make space in the last depth 4 node */
2888 mtree_erase(mt, 629506047);
2890 /* Search from just after the gap in the second-last depth 4 */
2892 MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
2894 mt_set_non_kernel(0);
2898 * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
2900 * The table below shows the single entry tree (0-0 pointer) and normal tree
2903 * Function ENTRY Start Result index & last
2904 * ┬ ┬ ┬ ┬ ┬
2905 * │ │ │ │ └─ the final range
2906 * │ │ │ └─ The node value after execution
2907 * │ │ └─ The node value before execution
2908 * │ └─ If the entry exists or does not exist (DNE)
2909 * └─ The function name
2911 * Function ENTRY Start Result index & last
2914 * Single entry tree at 0-0
2915 * ------------------------
2916 * DNE MAS_START MAS_NONE 1 - oo
2917 * DNE MAS_PAUSE MAS_NONE 1 - oo
2918 * DNE MAS_ROOT MAS_NONE 1 - oo
2920 * DNE MAS_NONE MAS_ROOT 0
2922 * DNE MAS_NONE MAS_NONE 1 - oo
2926 * exists MAS_START active range
2927 * DNE MAS_START active set to last range
2928 * exists MAS_PAUSE active range
2929 * DNE MAS_PAUSE active set to last range
2930 * exists MAS_NONE active range
2931 * exists active active range
2932 * DNE active active set to last range
2933 * ERANGE active MAS_OVERFLOW last range
2935 * Function ENTRY Start Result index & last
2938 * Single entry tree at 0-0
2939 * ------------------------
2941 * exists MAS_START MAS_ROOT 0
2942 * exists MAS_PAUSE MAS_ROOT 0
2943 * exists MAS_NONE MAS_ROOT 0
2946 * DNE MAS_START MAS_NONE 0
2947 * DNE MAS_PAUSE MAS_NONE 0
2948 * DNE MAS_NONE MAS_NONE 0
2949 * DNE MAS_ROOT MAS_NONE 0
2953 * exists MAS_START active range
2954 * DNE MAS_START active set to min
2955 * exists MAS_PAUSE active range
2956 * DNE MAS_PAUSE active set to min
2957 * exists MAS_NONE active range
2958 * DNE MAS_NONE MAS_NONE set to min
2959 * any MAS_ROOT MAS_NONE 0
2960 * exists active active range
2961 * DNE active active last range
2962 * ERANGE active MAS_UNDERFLOW last range
2964 * Function ENTRY Start Result index & last
2966 * - at index or next
2967 * Single entry tree at 0-0
2968 * ------------------------
2970 * DNE MAS_START MAS_NONE 0
2971 * DNE MAS_PAUSE MAS_NONE 0
2972 * DNE MAS_ROOT MAS_NONE 0
2973 * DNE MAS_NONE MAS_NONE 1
2975 * exists MAS_START MAS_ROOT 0
2976 * exists MAS_PAUSE MAS_ROOT 0
2977 * exists MAS_NONE MAS_ROOT 0
2981 * exists MAS_START active range
2982 * DNE MAS_START active set to max
2983 * exists MAS_PAUSE active range
2984 * DNE MAS_PAUSE active set to max
2985 * exists MAS_NONE active range (start at last)
2986 * exists active active range
2987 * DNE active active last range (max < last)
2989 * Function ENTRY Start Result index & last
2991 * - at index or before
2992 * Single entry tree at 0-0
2993 * ------------------------
2995 * exists MAS_START MAS_ROOT 0
2996 * exists MAS_PAUSE MAS_ROOT 0
2997 * exists MAS_NONE MAS_ROOT 0
2999 * DNE MAS_START MAS_NONE 0
3000 * DNE MAS_PAUSE MAS_NONE 0
3001 * DNE MAS_NONE MAS_NONE 0
3002 * DNE MAS_ROOT MAS_NONE 0
3006 * exists MAS_START active range
3007 * DNE MAS_START active set to min
3008 * exists MAS_PAUSE active range
3009 * DNE MAS_PAUSE active set to min
3010 * exists MAS_NONE active range (start at index)
3011 * exists active active range
3012 * DNE active active last range (min > index)
3014 * Function ENTRY Start Result index & last
3017 * Single entry tree at 0-0
3018 * ------------------------
3020 * DNE MAS_START MAS_ROOT 1 - oo
3021 * DNE MAS_PAUSE MAS_ROOT 1 - oo
3022 * DNE MAS_NONE MAS_ROOT 1 - oo
3023 * DNE MAS_ROOT MAS_ROOT 1 - oo
3025 * exists MAS_START MAS_ROOT 0
3026 * exists MAS_PAUSE MAS_ROOT 0
3027 * exists MAS_NONE MAS_ROOT 0
3028 * exists MAS_ROOT MAS_ROOT 0
3032 * exists MAS_START active range
3033 * DNE MAS_START active range of NULL
3034 * exists MAS_PAUSE active range
3035 * DNE MAS_PAUSE active range of NULL
3036 * exists MAS_NONE active range
3037 * DNE MAS_NONE active range of NULL
3038 * exists active active range
3039 * DNE active active range of NULL
/*
 * mas_active() - true when the ma_state is in the "active" walking state,
 * i.e. its node is not one of the special sentinel values (MAS_ROOT,
 * MAS_START, MAS_PAUSE, MAS_NONE).  Test-local helper; evaluates its
 * argument multiple times, so pass only a plain ma_state lvalue.
 */
3042 #define mas_active(x) (((x).node != MAS_ROOT) && \
3043 ((x).node != MAS_START) && \
3044 ((x).node != MAS_PAUSE) && \
3045 ((x).node != MAS_NONE))
/*
 * check_state_handling() - verify the ma_state node-state transitions
 * documented in the table above for mas_prev/mas_next/mas_find/
 * mas_find_rev/mas_walk, first against a single-entry tree (root pointer
 * at 0-0), then against a real node tree with entries at 0x1000-0x1500,
 * 0x2000-0x2500 and 0x3000-0x3500.  Each step asserts the returned entry,
 * the resulting index/last range, and the resulting state (MAS_ROOT,
 * MAS_NONE, MAS_UNDERFLOW, MAS_OVERFLOW, or active).
 *
 * NOTE(review): several mas_set()/mas_pause()/mas_reset() lines between
 * steps are not visible in this excerpt; the step comments describe the
 * intended starting state — confirm against the full source.
 */
3046 static noinline void __init check_state_handling(struct maple_tree *mt)
3048 MA_STATE(mas, mt, 0, 0);
3049 void *entry, *ptr = (void *) 0x1234500;
3053 /* Check MAS_ROOT First */
3054 mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
3057 /* prev: Start -> underflow*/
3058 entry = mas_prev(&mas, 0);
3059 MT_BUG_ON(mt, entry != NULL);
3060 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3062 /* prev: Start -> root */
3064 entry = mas_prev(&mas, 0);
3065 MT_BUG_ON(mt, entry != ptr);
3066 MT_BUG_ON(mt, mas.index != 0);
3067 MT_BUG_ON(mt, mas.last != 0);
3068 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3070 /* prev: pause -> root */
3073 entry = mas_prev(&mas, 0);
3074 MT_BUG_ON(mt, entry != ptr);
3075 MT_BUG_ON(mt, mas.index != 0);
3076 MT_BUG_ON(mt, mas.last != 0);
3077 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3079 /* next: start -> none */
3081 entry = mas_next(&mas, ULONG_MAX);
3082 MT_BUG_ON(mt, mas.index != 1);
3083 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3084 MT_BUG_ON(mt, entry != NULL);
3085 MT_BUG_ON(mt, mas.node != MAS_NONE);
3087 /* next: start -> none*/
3089 entry = mas_next(&mas, ULONG_MAX);
3090 MT_BUG_ON(mt, mas.index != 1);
3091 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3092 MT_BUG_ON(mt, entry != NULL);
3093 MT_BUG_ON(mt, mas.node != MAS_NONE);
3095 /* find: start -> root */
3097 entry = mas_find(&mas, ULONG_MAX);
3098 MT_BUG_ON(mt, entry != ptr);
3099 MT_BUG_ON(mt, mas.index != 0);
3100 MT_BUG_ON(mt, mas.last != 0);
3101 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3103 /* find: root -> none */
3104 entry = mas_find(&mas, ULONG_MAX);
3105 MT_BUG_ON(mt, entry != NULL);
3106 MT_BUG_ON(mt, mas.index != 1);
3107 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3108 MT_BUG_ON(mt, mas.node != MAS_NONE);
3110 /* find: none -> none */
3111 entry = mas_find(&mas, ULONG_MAX);
3112 MT_BUG_ON(mt, entry != NULL);
3113 MT_BUG_ON(mt, mas.index != 1);
3114 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3115 MT_BUG_ON(mt, mas.node != MAS_NONE);
3117 /* find: start -> none */
3119 entry = mas_find(&mas, ULONG_MAX);
3120 MT_BUG_ON(mt, entry != NULL);
3121 MT_BUG_ON(mt, mas.index != 1);
3122 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3123 MT_BUG_ON(mt, mas.node != MAS_NONE);
3125 /* find_rev: none -> root */
3126 entry = mas_find_rev(&mas, 0);
3127 MT_BUG_ON(mt, entry != ptr);
3128 MT_BUG_ON(mt, mas.index != 0);
3129 MT_BUG_ON(mt, mas.last != 0);
3130 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3132 /* find_rev: start -> root */
3134 entry = mas_find_rev(&mas, 0);
3135 MT_BUG_ON(mt, entry != ptr);
3136 MT_BUG_ON(mt, mas.index != 0);
3137 MT_BUG_ON(mt, mas.last != 0);
3138 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3140 /* find_rev: root -> none */
3141 entry = mas_find_rev(&mas, 0);
3142 MT_BUG_ON(mt, entry != NULL);
3143 MT_BUG_ON(mt, mas.index != 0);
3144 MT_BUG_ON(mt, mas.last != 0);
3145 MT_BUG_ON(mt, mas.node != MAS_NONE);
3147 /* find_rev: none -> none */
3148 entry = mas_find_rev(&mas, 0);
3149 MT_BUG_ON(mt, entry != NULL);
3150 MT_BUG_ON(mt, mas.index != 0);
3151 MT_BUG_ON(mt, mas.last != 0);
3152 MT_BUG_ON(mt, mas.node != MAS_NONE);
3154 /* find_rev: start -> root */
3156 entry = mas_find_rev(&mas, 0);
3157 MT_BUG_ON(mt, entry != ptr);
3158 MT_BUG_ON(mt, mas.index != 0);
3159 MT_BUG_ON(mt, mas.last != 0);
3160 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3162 /* walk: start -> none */
3164 entry = mas_walk(&mas);
3165 MT_BUG_ON(mt, entry != NULL);
3166 MT_BUG_ON(mt, mas.index != 1);
3167 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3168 MT_BUG_ON(mt, mas.node != MAS_NONE);
3170 /* walk: pause -> none*/
3173 entry = mas_walk(&mas);
3174 MT_BUG_ON(mt, entry != NULL);
3175 MT_BUG_ON(mt, mas.index != 1);
3176 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3177 MT_BUG_ON(mt, mas.node != MAS_NONE);
3179 /* walk: none -> none */
3180 mas.index = mas.last = 10;
3181 entry = mas_walk(&mas);
3182 MT_BUG_ON(mt, entry != NULL);
3183 MT_BUG_ON(mt, mas.index != 1);
3184 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3185 MT_BUG_ON(mt, mas.node != MAS_NONE);
3187 /* walk: none -> none */
3188 entry = mas_walk(&mas);
3189 MT_BUG_ON(mt, entry != NULL);
3190 MT_BUG_ON(mt, mas.index != 1);
3191 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3192 MT_BUG_ON(mt, mas.node != MAS_NONE);
3194 /* walk: start -> root */
3196 entry = mas_walk(&mas);
3197 MT_BUG_ON(mt, entry != ptr);
3198 MT_BUG_ON(mt, mas.index != 0);
3199 MT_BUG_ON(mt, mas.last != 0);
3200 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3202 /* walk: pause -> root */
3205 entry = mas_walk(&mas);
3206 MT_BUG_ON(mt, entry != ptr);
3207 MT_BUG_ON(mt, mas.index != 0);
3208 MT_BUG_ON(mt, mas.last != 0);
3209 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3211 /* walk: none -> root */
3212 mas.node = MAS_NONE;
3213 entry = mas_walk(&mas);
3214 MT_BUG_ON(mt, entry != ptr);
3215 MT_BUG_ON(mt, mas.index != 0);
3216 MT_BUG_ON(mt, mas.last != 0);
3217 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3219 /* walk: root -> root */
3220 entry = mas_walk(&mas);
3221 MT_BUG_ON(mt, entry != ptr);
3222 MT_BUG_ON(mt, mas.index != 0);
3223 MT_BUG_ON(mt, mas.last != 0);
3224 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3226 /* walk: root -> none */
3228 entry = mas_walk(&mas);
3229 MT_BUG_ON(mt, entry != NULL);
3230 MT_BUG_ON(mt, mas.index != 1);
3231 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3232 MT_BUG_ON(mt, mas.node != MAS_NONE);
3234 /* walk: none -> root */
3235 mas.index = mas.last = 0;
3236 entry = mas_walk(&mas);
3237 MT_BUG_ON(mt, entry != ptr);
3238 MT_BUG_ON(mt, mas.index != 0);
3239 MT_BUG_ON(mt, mas.last != 0);
3240 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3244 /* Check when there is an actual node */
3245 mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3246 mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3247 mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3248 mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3252 /* next: start ->active */
3254 entry = mas_next(&mas, ULONG_MAX);
3255 MT_BUG_ON(mt, entry != ptr);
3256 MT_BUG_ON(mt, mas.index != 0x1000);
3257 MT_BUG_ON(mt, mas.last != 0x1500);
3258 MT_BUG_ON(mt, !mas_active(mas));
3260 /* next: pause ->active */
3263 entry = mas_next(&mas, ULONG_MAX);
3264 MT_BUG_ON(mt, entry != ptr);
3265 MT_BUG_ON(mt, mas.index != 0x1000);
3266 MT_BUG_ON(mt, mas.last != 0x1500);
3267 MT_BUG_ON(mt, !mas_active(mas));
3269 /* next: none ->active */
3270 mas.index = mas.last = 0;
3272 mas.node = MAS_NONE;
3273 entry = mas_next(&mas, ULONG_MAX);
3274 MT_BUG_ON(mt, entry != ptr);
3275 MT_BUG_ON(mt, mas.index != 0x1000);
3276 MT_BUG_ON(mt, mas.last != 0x1500);
3277 MT_BUG_ON(mt, !mas_active(mas));
3279 /* next:active ->active */
3280 entry = mas_next(&mas, ULONG_MAX);
3281 MT_BUG_ON(mt, entry != ptr2);
3282 MT_BUG_ON(mt, mas.index != 0x2000);
3283 MT_BUG_ON(mt, mas.last != 0x2500);
3284 MT_BUG_ON(mt, !mas_active(mas));
3286 /* next:active -> active beyond data */
3287 entry = mas_next(&mas, 0x2999);
3288 MT_BUG_ON(mt, entry != NULL);
3289 MT_BUG_ON(mt, mas.index != 0x2501);
3290 MT_BUG_ON(mt, mas.last != 0x2fff);
3291 MT_BUG_ON(mt, !mas_active(mas));
3293 /* Continue after last range ends after max */
3294 entry = mas_next(&mas, ULONG_MAX);
3295 MT_BUG_ON(mt, entry != ptr3);
3296 MT_BUG_ON(mt, mas.index != 0x3000);
3297 MT_BUG_ON(mt, mas.last != 0x3500);
3298 MT_BUG_ON(mt, !mas_active(mas));
3300 /* next:active -> active continued */
3301 entry = mas_next(&mas, ULONG_MAX);
3302 MT_BUG_ON(mt, entry != NULL);
3303 MT_BUG_ON(mt, mas.index != 0x3501);
3304 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3305 MT_BUG_ON(mt, !mas_active(mas));
3307 /* next:active -> overflow */
3308 entry = mas_next(&mas, ULONG_MAX);
3309 MT_BUG_ON(mt, entry != NULL);
3310 MT_BUG_ON(mt, mas.index != 0x3501);
3311 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3312 MT_BUG_ON(mt, mas.node != MAS_OVERFLOW);
3314 /* next:overflow -> overflow */
3315 entry = mas_next(&mas, ULONG_MAX);
3316 MT_BUG_ON(mt, entry != NULL);
3317 MT_BUG_ON(mt, mas.index != 0x3501);
3318 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3319 MT_BUG_ON(mt, mas.node != MAS_OVERFLOW);
3321 /* prev:overflow -> active */
3322 entry = mas_prev(&mas, 0);
3323 MT_BUG_ON(mt, entry != ptr3);
3324 MT_BUG_ON(mt, mas.index != 0x3000);
3325 MT_BUG_ON(mt, mas.last != 0x3500);
3326 MT_BUG_ON(mt, !mas_active(mas));
3328 /* next: none -> active, skip value at location */
3330 entry = mas_next(&mas, ULONG_MAX);
3331 mas.node = MAS_NONE;
3333 entry = mas_next(&mas, ULONG_MAX);
3334 MT_BUG_ON(mt, entry != ptr2);
3335 MT_BUG_ON(mt, mas.index != 0x2000);
3336 MT_BUG_ON(mt, mas.last != 0x2500);
3337 MT_BUG_ON(mt, !mas_active(mas));
3339 /* prev:active ->active */
3340 entry = mas_prev(&mas, 0);
3341 MT_BUG_ON(mt, entry != ptr);
3342 MT_BUG_ON(mt, mas.index != 0x1000);
3343 MT_BUG_ON(mt, mas.last != 0x1500);
3344 MT_BUG_ON(mt, !mas_active(mas));
3346 /* prev:active -> active spanning end range */
3347 entry = mas_prev(&mas, 0x0100);
3348 MT_BUG_ON(mt, entry != NULL);
3349 MT_BUG_ON(mt, mas.index != 0);
3350 MT_BUG_ON(mt, mas.last != 0x0FFF);
3351 MT_BUG_ON(mt, !mas_active(mas));
3353 /* prev:active -> underflow */
3354 entry = mas_prev(&mas, 0);
3355 MT_BUG_ON(mt, entry != NULL);
3356 MT_BUG_ON(mt, mas.index != 0);
3357 MT_BUG_ON(mt, mas.last != 0x0FFF);
3358 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3360 /* prev:underflow -> underflow */
3361 entry = mas_prev(&mas, 0);
3362 MT_BUG_ON(mt, entry != NULL);
3363 MT_BUG_ON(mt, mas.index != 0);
3364 MT_BUG_ON(mt, mas.last != 0x0FFF);
3365 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3367 /* next:underflow -> active */
3368 entry = mas_next(&mas, ULONG_MAX);
3369 MT_BUG_ON(mt, entry != ptr);
3370 MT_BUG_ON(mt, mas.index != 0x1000);
3371 MT_BUG_ON(mt, mas.last != 0x1500);
3372 MT_BUG_ON(mt, !mas_active(mas));
3374 /* prev:first value -> underflow */
3375 entry = mas_prev(&mas, 0x1000);
3376 MT_BUG_ON(mt, entry != NULL);
3377 MT_BUG_ON(mt, mas.index != 0x1000);
3378 MT_BUG_ON(mt, mas.last != 0x1500);
3379 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3381 /* find:underflow -> first value */
3382 entry = mas_find(&mas, ULONG_MAX);
3383 MT_BUG_ON(mt, entry != ptr);
3384 MT_BUG_ON(mt, mas.index != 0x1000);
3385 MT_BUG_ON(mt, mas.last != 0x1500);
3386 MT_BUG_ON(mt, !mas_active(mas));
3388 /* prev: pause ->active */
3389 mas_set(&mas, 0x3600);
3390 entry = mas_prev(&mas, 0);
3391 MT_BUG_ON(mt, entry != ptr3);
3393 entry = mas_prev(&mas, 0);
3394 MT_BUG_ON(mt, entry != ptr2);
3395 MT_BUG_ON(mt, mas.index != 0x2000);
3396 MT_BUG_ON(mt, mas.last != 0x2500);
3397 MT_BUG_ON(mt, !mas_active(mas));
3399 /* prev:active -> active spanning min */
3400 entry = mas_prev(&mas, 0x1600);
3401 MT_BUG_ON(mt, entry != NULL);
3402 MT_BUG_ON(mt, mas.index != 0x1501);
3403 MT_BUG_ON(mt, mas.last != 0x1FFF);
3404 MT_BUG_ON(mt, !mas_active(mas));
3406 /* prev: active ->active, continue */
3407 entry = mas_prev(&mas, 0);
3408 MT_BUG_ON(mt, entry != ptr);
3409 MT_BUG_ON(mt, mas.index != 0x1000);
3410 MT_BUG_ON(mt, mas.last != 0x1500);
3411 MT_BUG_ON(mt, !mas_active(mas));
3413 /* find: start ->active */
3415 entry = mas_find(&mas, ULONG_MAX);
3416 MT_BUG_ON(mt, entry != ptr);
3417 MT_BUG_ON(mt, mas.index != 0x1000);
3418 MT_BUG_ON(mt, mas.last != 0x1500);
3419 MT_BUG_ON(mt, !mas_active(mas));
3421 /* find: pause ->active */
3424 entry = mas_find(&mas, ULONG_MAX);
3425 MT_BUG_ON(mt, entry != ptr);
3426 MT_BUG_ON(mt, mas.index != 0x1000);
3427 MT_BUG_ON(mt, mas.last != 0x1500);
3428 MT_BUG_ON(mt, !mas_active(mas));
3430 /* find: start ->active on value */;
3431 mas_set(&mas, 1200);
3432 entry = mas_find(&mas, ULONG_MAX);
3433 MT_BUG_ON(mt, entry != ptr);
3434 MT_BUG_ON(mt, mas.index != 0x1000);
3435 MT_BUG_ON(mt, mas.last != 0x1500);
3436 MT_BUG_ON(mt, !mas_active(mas));
3438 /* find:active ->active */
3439 entry = mas_find(&mas, ULONG_MAX);
3440 MT_BUG_ON(mt, entry != ptr2);
3441 MT_BUG_ON(mt, mas.index != 0x2000);
3442 MT_BUG_ON(mt, mas.last != 0x2500);
3443 MT_BUG_ON(mt, !mas_active(mas));
3446 /* find:active -> active (NULL)*/
3447 entry = mas_find(&mas, 0x2700);
3448 MT_BUG_ON(mt, entry != NULL);
3449 MT_BUG_ON(mt, mas.index != 0x2501);
3450 MT_BUG_ON(mt, mas.last != 0x2FFF);
3451 MT_BUG_ON(mt, !mas_active(mas));
3453 /* find: overflow ->active */
3454 entry = mas_find(&mas, 0x5000);
3455 MT_BUG_ON(mt, entry != ptr3);
3456 MT_BUG_ON(mt, mas.index != 0x3000);
3457 MT_BUG_ON(mt, mas.last != 0x3500);
3458 MT_BUG_ON(mt, !mas_active(mas));
3460 /* find:active -> active (NULL) end*/
3461 entry = mas_find(&mas, ULONG_MAX);
3462 MT_BUG_ON(mt, entry != NULL);
3463 MT_BUG_ON(mt, mas.index != 0x3501);
3464 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3465 MT_BUG_ON(mt, !mas_active(mas));
3467 /* find_rev: active (END) ->active */
3468 entry = mas_find_rev(&mas, 0);
3469 MT_BUG_ON(mt, entry != ptr3);
3470 MT_BUG_ON(mt, mas.index != 0x3000);
3471 MT_BUG_ON(mt, mas.last != 0x3500);
3472 MT_BUG_ON(mt, !mas_active(mas));
3474 /* find_rev:active ->active */
3475 entry = mas_find_rev(&mas, 0);
3476 MT_BUG_ON(mt, entry != ptr2);
3477 MT_BUG_ON(mt, mas.index != 0x2000);
3478 MT_BUG_ON(mt, mas.last != 0x2500);
3479 MT_BUG_ON(mt, !mas_active(mas));
3481 /* find_rev: pause ->active */
3483 entry = mas_find_rev(&mas, 0);
3484 MT_BUG_ON(mt, entry != ptr);
3485 MT_BUG_ON(mt, mas.index != 0x1000);
3486 MT_BUG_ON(mt, mas.last != 0x1500);
3487 MT_BUG_ON(mt, !mas_active(mas));
3489 /* find_rev:active -> active */
3490 entry = mas_find_rev(&mas, 0);
3491 MT_BUG_ON(mt, entry != NULL);
3492 MT_BUG_ON(mt, mas.index != 0);
3493 MT_BUG_ON(mt, mas.last != 0x0FFF);
3494 MT_BUG_ON(mt, !mas_active(mas));
3496 /* find_rev: start ->active */
3497 mas_set(&mas, 0x1200);
3498 entry = mas_find_rev(&mas, 0);
3499 MT_BUG_ON(mt, entry != ptr);
3500 MT_BUG_ON(mt, mas.index != 0x1000);
3501 MT_BUG_ON(mt, mas.last != 0x1500);
3502 MT_BUG_ON(mt, !mas_active(mas));
3504 /* mas_walk start ->active */
3505 mas_set(&mas, 0x1200);
3506 entry = mas_walk(&mas);
3507 MT_BUG_ON(mt, entry != ptr);
3508 MT_BUG_ON(mt, mas.index != 0x1000);
3509 MT_BUG_ON(mt, mas.last != 0x1500);
3510 MT_BUG_ON(mt, !mas_active(mas));
3512 /* mas_walk start ->active */
3513 mas_set(&mas, 0x1600);
3514 entry = mas_walk(&mas);
3515 MT_BUG_ON(mt, entry != NULL);
3516 MT_BUG_ON(mt, mas.index != 0x1501);
3517 MT_BUG_ON(mt, mas.last != 0x1fff);
3518 MT_BUG_ON(mt, !mas_active(mas));
3520 /* mas_walk pause ->active */
3521 mas_set(&mas, 0x1200);
3523 entry = mas_walk(&mas);
3524 MT_BUG_ON(mt, entry != ptr);
3525 MT_BUG_ON(mt, mas.index != 0x1000);
3526 MT_BUG_ON(mt, mas.last != 0x1500);
3527 MT_BUG_ON(mt, !mas_active(mas));
3529 /* mas_walk pause -> active */
3530 mas_set(&mas, 0x1600);
3532 entry = mas_walk(&mas);
3533 MT_BUG_ON(mt, entry != NULL);
3534 MT_BUG_ON(mt, mas.index != 0x1501);
3535 MT_BUG_ON(mt, mas.last != 0x1fff);
3536 MT_BUG_ON(mt, !mas_active(mas));
3538 /* mas_walk none -> active */
3539 mas_set(&mas, 0x1200);
3540 mas.node = MAS_NONE;
3541 entry = mas_walk(&mas);
3542 MT_BUG_ON(mt, entry != ptr);
3543 MT_BUG_ON(mt, mas.index != 0x1000);
3544 MT_BUG_ON(mt, mas.last != 0x1500);
3545 MT_BUG_ON(mt, !mas_active(mas));
3547 /* mas_walk none -> active */
3548 mas_set(&mas, 0x1600);
3549 mas.node = MAS_NONE;
3550 entry = mas_walk(&mas);
3551 MT_BUG_ON(mt, entry != NULL);
3552 MT_BUG_ON(mt, mas.index != 0x1501);
3553 MT_BUG_ON(mt, mas.last != 0x1fff);
3554 MT_BUG_ON(mt, !mas_active(mas));
3556 /* mas_walk active -> active */
3560 entry = mas_walk(&mas);
3561 MT_BUG_ON(mt, entry != ptr);
3562 MT_BUG_ON(mt, mas.index != 0x1000);
3563 MT_BUG_ON(mt, mas.last != 0x1500);
3564 MT_BUG_ON(mt, !mas_active(mas));
3566 /* mas_walk active -> active */
3569 entry = mas_walk(&mas);
3570 MT_BUG_ON(mt, entry != NULL);
3571 MT_BUG_ON(mt, mas.index != 0x1501);
3572 MT_BUG_ON(mt, mas.last != 0x1fff);
3573 MT_BUG_ON(mt, !mas_active(mas));
/* The single tree instance shared (re-initialized) by all tests below. */
3578 static DEFINE_MTREE(tree);
/*
 * maple_tree_seed() - module init: run the full maple tree test suite.
 *
 * Each test gets a freshly initialized tree (mt_init_flags) and destroys it
 * afterwards (mtree_destroy).  Benchmark sections are compiled in only when
 * the corresponding BENCH_* macro is defined near the top of the file.
 * Results are tallied in maple_tree_tests_run/passed by MT_BUG_ON().
 */
3579 static int __init maple_tree_seed(void)
3581 unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
3582 1001, 1002, 1003, 1005, 0,
3586 pr_info("\nTEST STARTING\n\n");
3588 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3589 check_root_expand(&tree);
3590 mtree_destroy(&tree);
3592 #if defined(BENCH_SLOT_STORE)
3594 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3595 bench_slot_store(&tree);
3596 mtree_destroy(&tree);
3599 #if defined(BENCH_NODE_STORE)
3601 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3602 bench_node_store(&tree);
3603 mtree_destroy(&tree);
3606 #if defined(BENCH_AWALK)
3608 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3610 mtree_destroy(&tree);
3613 #if defined(BENCH_WALK)
3615 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3617 mtree_destroy(&tree);
3620 #if defined(BENCH_FORK)
3622 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3623 bench_forking(&tree);
3624 mtree_destroy(&tree);
3627 #if defined(BENCH_MT_FOR_EACH)
3629 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3630 bench_mt_for_each(&tree);
3631 mtree_destroy(&tree);
3634 #if defined(BENCH_MAS_FOR_EACH)
3636 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3637 bench_mas_for_each(&tree);
3638 mtree_destroy(&tree);
3641 #if defined(BENCH_MAS_PREV)
3643 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3644 bench_mas_prev(&tree);
3645 mtree_destroy(&tree);
3649 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3650 check_iteration(&tree);
3651 mtree_destroy(&tree);
3653 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3654 check_forking(&tree);
3655 mtree_destroy(&tree);
3657 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3658 check_mas_store_gfp(&tree);
3659 mtree_destroy(&tree);
3661 /* Test ranges (store and insert) */
3662 mt_init_flags(&tree, 0);
3663 check_ranges(&tree);
3664 mtree_destroy(&tree);
3666 #if defined(CONFIG_64BIT)
3667 /* These tests have ranges outside of 4GB */
3668 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3669 check_alloc_range(&tree);
3670 mtree_destroy(&tree);
3672 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3673 check_alloc_rev_range(&tree);
3674 mtree_destroy(&tree);
3677 mt_init_flags(&tree, 0);
3679 check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */
3681 check_insert(&tree, set[9], &tree); /* Insert 0 */
3682 check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
3683 check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */
3685 check_insert(&tree, set[10], ptr); /* Insert 5003 */
3686 check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
3687 check_load(&tree, set[11], NULL); /* See if 5002 -> NULL */
3688 check_load(&tree, set[10], ptr); /* See if 5003 -> ptr */
3690 /* Clear out the tree */
3691 mtree_destroy(&tree);
3693 /* Try to insert, insert a dup, and load back what was inserted. */
3694 mt_init_flags(&tree, 0);
3695 check_insert(&tree, set[0], &tree); /* Insert 5015 */
3696 check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
3697 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
3700 * Second set of tests try to load a value that doesn't exist, inserts
3701 * a second value, then loads the value again
3703 check_load(&tree, set[1], NULL); /* See if 5014 -> NULL */
3704 check_insert(&tree, set[1], ptr); /* insert 5014 -> ptr */
3705 check_load(&tree, set[1], ptr); /* See if 5014 -> ptr */
3706 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
3708 * Tree currently contains:
3709 * p[0]: 14 -> (nil) p[1]: 15 -> ptr p[2]: 16 -> &tree p[3]: 0 -> (nil)
3711 check_insert(&tree, set[6], ptr); /* insert 1002 -> ptr */
3712 check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */
3714 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
3715 check_load(&tree, set[1], ptr); /* See if 5014 -> ptr */
3716 check_load(&tree, set[6], ptr); /* See if 1002 -> ptr */
3717 check_load(&tree, set[7], &tree); /* 1003 = &tree ? */
3719 /* Clear out tree */
3720 mtree_destroy(&tree);
3722 mt_init_flags(&tree, 0);
3723 /* Test inserting into a NULL hole. */
3724 check_insert(&tree, set[5], ptr); /* insert 1001 -> ptr */
3725 check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */
3726 check_insert(&tree, set[6], ptr); /* insert 1002 -> ptr */
3727 check_load(&tree, set[5], ptr); /* See if 1001 -> ptr */
3728 check_load(&tree, set[6], ptr); /* See if 1002 -> ptr */
3729 check_load(&tree, set[7], &tree); /* See if 1003 -> &tree */
3731 /* Clear out the tree */
3732 mtree_destroy(&tree);
3734 mt_init_flags(&tree, 0);
3736 * set[] = {5015, 5014, 5017, 25, 1000,
3737 * 1001, 1002, 1003, 1005, 0,
3741 check_insert(&tree, set[0], ptr); /* 5015 */
3742 check_insert(&tree, set[1], &tree); /* 5014 */
3743 check_insert(&tree, set[2], ptr); /* 5017 */
3744 check_insert(&tree, set[3], &tree); /* 25 */
3745 check_load(&tree, set[0], ptr);
3746 check_load(&tree, set[1], &tree);
3747 check_load(&tree, set[2], ptr);
3748 check_load(&tree, set[3], &tree);
3749 check_insert(&tree, set[4], ptr); /* 1000 < Should split. */
3750 check_load(&tree, set[0], ptr);
3751 check_load(&tree, set[1], &tree);
3752 check_load(&tree, set[2], ptr);
3753 check_load(&tree, set[3], &tree); /*25 */
3754 check_load(&tree, set[4], ptr);
3755 check_insert(&tree, set[5], &tree); /* 1001 */
3756 check_load(&tree, set[0], ptr);
3757 check_load(&tree, set[1], &tree);
3758 check_load(&tree, set[2], ptr);
3759 check_load(&tree, set[3], &tree);
3760 check_load(&tree, set[4], ptr);
3761 check_load(&tree, set[5], &tree);
3762 check_insert(&tree, set[6], ptr);
3763 check_load(&tree, set[0], ptr);
3764 check_load(&tree, set[1], &tree);
3765 check_load(&tree, set[2], ptr);
3766 check_load(&tree, set[3], &tree);
3767 check_load(&tree, set[4], ptr);
3768 check_load(&tree, set[5], &tree);
3769 check_load(&tree, set[6], ptr);
3770 check_insert(&tree, set[7], &tree);
3771 check_load(&tree, set[0], ptr);
3772 check_insert(&tree, set[8], ptr);
3774 check_insert(&tree, set[9], &tree);
3776 check_load(&tree, set[0], ptr);
3777 check_load(&tree, set[1], &tree);
3778 check_load(&tree, set[2], ptr);
3779 check_load(&tree, set[3], &tree);
3780 check_load(&tree, set[4], ptr);
3781 check_load(&tree, set[5], &tree);
3782 check_load(&tree, set[6], ptr);
3783 check_load(&tree, set[9], &tree);
3784 mtree_destroy(&tree);
3786 mt_init_flags(&tree, 0);
3787 check_seq(&tree, 16, false);
3788 mtree_destroy(&tree);
3790 mt_init_flags(&tree, 0);
3791 check_seq(&tree, 1000, true);
3792 mtree_destroy(&tree);
3794 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3795 check_rev_seq(&tree, 1000, true);
3796 mtree_destroy(&tree);
3798 check_lower_bound_split(&tree);
3799 check_upper_bound_split(&tree);
3800 check_mid_split(&tree);
3802 mt_init_flags(&tree, 0);
3803 check_next_entry(&tree);
3805 check_find_2(&tree);
3806 mtree_destroy(&tree);
3808 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3809 check_prev_entry(&tree);
3810 mtree_destroy(&tree);
3812 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3813 check_gap_combining(&tree);
3814 mtree_destroy(&tree);
3816 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3817 check_node_overwrite(&tree);
3818 mtree_destroy(&tree);
3820 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3821 next_prev_test(&tree);
3822 mtree_destroy(&tree);
3824 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3825 check_spanning_relatives(&tree);
3826 mtree_destroy(&tree);
3828 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3829 check_rev_find(&tree);
3830 mtree_destroy(&tree);
3832 mt_init_flags(&tree, 0);
3833 check_fuzzer(&tree);
3834 mtree_destroy(&tree);
3836 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3838 mtree_destroy(&tree);
3840 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3841 check_bnode_min_spanning(&tree);
3842 mtree_destroy(&tree);
3844 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3845 check_empty_area_window(&tree);
3846 mtree_destroy(&tree);
3848 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3849 check_empty_area_fill(&tree);
3850 mtree_destroy(&tree);
3852 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3853 check_state_handling(&tree);
3854 mtree_destroy(&tree);
/* Report totals; "all passed" determines the module's return value. */
3860 pr_info("maple_tree: %u of %u tests passed\n",
3861 atomic_read(&maple_tree_tests_passed),
3862 atomic_read(&maple_tree_tests_run));
3863 if (atomic_read(&maple_tree_tests_run) ==
3864 atomic_read(&maple_tree_tests_passed))
/* Module exit hook; body not shown in this excerpt (likely empty). */
3870 static void __exit maple_tree_harvest(void)
/* Standard module registration boilerplate. */
3875 module_init(maple_tree_seed);
3876 module_exit(maple_tree_harvest);
3877 MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
3878 MODULE_LICENSE("GPL");