// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
 *
 * Any tests that only require the interface of the tree.
 */
10 #include <linux/maple_tree.h>
11 #include <linux/module.h>
/* Upper bound used by the allocation-range tests below. */
13 #define MTREE_ALLOC_MAX 0x2000000000000Ul
14 #define CONFIG_MAPLE_SEARCH
/* True when a node has more than 31 slots (32-bit layout). */
15 #define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)
/*
 * Without CONFIG_DEBUG_MAPLE_TREE the dump/validate helpers do not exist;
 * stub them out so the test code compiles either way.
 * NOTE(review): this extracted chunk does not show the matching #endif —
 * confirm against the full file.
 */
17 #ifndef CONFIG_DEBUG_MAPLE_TREE
18 #define mt_dump(mt, fmt) do {} while (0)
19 #define mt_validate(mt) do {} while (0)
20 #define mt_cache_shrink() do {} while (0)
21 #define mas_dump(mas) do {} while (0)
22 #define mas_wr_dump(mas) do {} while (0)
/* Global pass/run counters read and bumped by MT_BUG_ON() below. */
23 atomic_t maple_tree_tests_run;
24 atomic_t maple_tree_tests_passed;
/*
 * MT_BUG_ON() - test assertion: count the test run, and on failure print the
 * failing location plus the pass/run totals; on success count a pass.
 * NOTE(review): the if/else lines selecting between the failure print and the
 * pass increment are missing from this extraction — confirm against the
 * full file.
 */
27 #define MT_BUG_ON(__tree, __x) do { \
28 atomic_inc(&maple_tree_tests_run); \
30 pr_info("BUG at %s:%d (%u)\n", \
31 __func__, __LINE__, __x); \
32 pr_info("Pass: %u Run:%u\n", \
33 atomic_read(&maple_tree_tests_passed), \
34 atomic_read(&maple_tree_tests_run)); \
36 atomic_inc(&maple_tree_tests_passed); \
/* Benchmark selectors: uncomment exactly one to run that benchmark. */
41 /* #define BENCH_SLOT_STORE */
42 /* #define BENCH_NODE_STORE */
43 /* #define BENCH_AWALK */
44 /* #define BENCH_WALK */
45 /* #define BENCH_MT_FOR_EACH */
46 /* #define BENCH_FORK */
47 /* #define BENCH_MAS_FOR_EACH */
48 /* #define BENCH_MAS_PREV */
/* Userspace-test stubs: these hooks are no-ops in the kernel build. */
51 #define mt_set_non_kernel(x) do {} while (0)
52 #define mt_zero_nr_tallocated(x) do {} while (0)
54 #define cond_resched() do {} while (0)
56 static int __init mtree_insert_index(struct maple_tree *mt,
57 unsigned long index, gfp_t gfp)
59 return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
62 static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
64 MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
65 MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
68 static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
71 return mtree_insert(mt, index, ptr, GFP_KERNEL);
74 static int __init mtree_test_store_range(struct maple_tree *mt,
75 unsigned long start, unsigned long end, void *ptr)
77 return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
80 static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
83 return mtree_test_store_range(mt, start, start, ptr);
86 static int __init mtree_test_insert_range(struct maple_tree *mt,
87 unsigned long start, unsigned long end, void *ptr)
89 return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
92 static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
94 return mtree_load(mt, index);
97 static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
99 return mtree_erase(mt, index);
102 #if defined(CONFIG_64BIT)
/*
 * Forward-allocate a gap of @size in [@start, @end] and assert both the
 * return code (@eret) and the resulting address (@expected).  @result is
 * seeded with expected + 1 so a no-op allocation cannot pass by accident.
 */
103 static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
104 unsigned long start, unsigned long end, unsigned long size,
105 unsigned long expected, int eret, void *ptr)
108 unsigned long result = expected + 1;
111 ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
113 MT_BUG_ON(mt, ret != eret);
117 MT_BUG_ON(mt, result != expected);
/*
 * Reverse-allocate (top-down) a gap of @size in [@start, @end] and assert
 * both the return code (@eret) and the resulting address (@expected).
 */
120 static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
121 unsigned long start, unsigned long end, unsigned long size,
122 unsigned long expected, int eret, void *ptr)
125 unsigned long result = expected + 1;
128 ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
130 MT_BUG_ON(mt, ret != eret);
134 MT_BUG_ON(mt, result != expected);
/*
 * Load @index and assert the entry equals @ptr, logging a diagnostic on
 * mismatch before the assertion fires.
 */
138 static noinline void __init check_load(struct maple_tree *mt,
139 unsigned long index, void *ptr)
141 void *ret = mtree_test_load(mt, index);
144 pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
145 MT_BUG_ON(mt, ret != ptr);
/*
 * Store @ptr over [@start, @end], assert the return code matches @expected,
 * then verify every index in the range loads back @ptr.
 */
148 static noinline void __init check_store_range(struct maple_tree *mt,
149 unsigned long start, unsigned long end, void *ptr, int expected)
154 ret = mtree_test_store_range(mt, start, end, ptr);
155 MT_BUG_ON(mt, ret != expected);
160 for (i = start; i <= end; i++)
161 check_load(mt, i, ptr);
/*
 * Insert @ptr over [@start, @end], assert the return code matches @expected,
 * then verify every index in the range loads back @ptr.
 */
164 static noinline void __init check_insert_range(struct maple_tree *mt,
165 unsigned long start, unsigned long end, void *ptr, int expected)
170 ret = mtree_test_insert_range(mt, start, end, ptr);
171 MT_BUG_ON(mt, ret != expected);
176 for (i = start; i <= end; i++)
177 check_load(mt, i, ptr);
/* Insert @ptr at @index and assert the insert succeeds (returns 0). */
180 static noinline void __init check_insert(struct maple_tree *mt,
181 unsigned long index, void *ptr)
185 ret = mtree_test_insert(mt, index, ptr);
186 MT_BUG_ON(mt, ret != 0);
/* Insert at an already-occupied @index and assert it fails with -EEXIST. */
189 static noinline void __init check_dup_insert(struct maple_tree *mt,
190 unsigned long index, void *ptr)
194 ret = mtree_test_insert(mt, index, ptr);
195 MT_BUG_ON(mt, ret != -EEXIST);
/*
 * Assert @index holds the canonical test value set by mtree_insert_index().
 * NOTE(review): the parameter list is truncated in this extraction —
 * presumably (struct maple_tree *mt, unsigned long index); confirm.
 */
199 static noinline void __init check_index_load(struct maple_tree *mt,
202 return check_load(mt, index, xa_mk_value(index & LONG_MAX));
/*
 * Scan a maple node's slots; the loop body and return logic are missing
 * from this extraction.  NOTE(review): presumably returns non-zero when any
 * slot (or the parent pointer) is populated — confirm against the full file.
 */
205 static inline __init int not_empty(struct maple_node *node)
212 for (i = 0; i < ARRAY_SIZE(node->slot); i++)
/*
 * Insert indices in descending order (max down), verifying after each insert
 * that every already-inserted index loads back correctly, the index below the
 * lowest insert is empty, and the tree has height.  Optionally dumps stats
 * when @verbose.
 */
220 static noinline void __init check_rev_seq(struct maple_tree *mt,
221 unsigned long max, bool verbose)
223 unsigned long i = max, j;
225 MT_BUG_ON(mt, !mtree_empty(mt));
227 mt_zero_nr_tallocated();
229 MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
230 for (j = i; j <= max; j++)
231 check_index_load(mt, j);
233 check_load(mt, i - 1, NULL);
235 MT_BUG_ON(mt, !mt_height(mt));
237 MT_BUG_ON(mt, !mt_height(mt));
/* Nothing stored above max. */
240 check_load(mt, max + 1, NULL);
245 mt_dump(mt, mt_dump_dec);
246 pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
247 __func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
/*
 * Insert indices 0..max in ascending order, verifying after each insert that
 * all previously inserted indices load back and the next index is empty.
 * Optionally dumps stats when @verbose.
 */
253 static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
258 MT_BUG_ON(mt, !mtree_empty(mt));
260 mt_zero_nr_tallocated();
261 for (i = 0; i <= max; i++) {
262 MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
263 for (j = 0; j <= i; j++)
264 check_index_load(mt, j);
267 MT_BUG_ON(mt, !mt_height(mt));
268 check_load(mt, i + 1, NULL);
274 mt_dump(mt, mt_dump_dec);
275 pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
276 max, mt_get_alloc_size()/1024, mt_nr_allocated(),
/*
 * Insert sparse power-of-two-ish indices down from ~4e9 and verify each
 * stored index loads back while its immediate neighbours stay empty —
 * exercises splits near the lower bound of node ranges.
 */
282 static noinline void __init check_lb_not_empty(struct maple_tree *mt)
285 unsigned long huge = 4000UL * 1000 * 1000;
290 check_insert(mt, i, (void *) i);
291 for (j = huge; j >= i; j /= 2) {
292 check_load(mt, j-1, NULL);
293 check_load(mt, j, (void *) j);
294 check_load(mt, j+1, NULL);
/* Lower-bound split test: run check_lb_not_empty() on an empty tree. */
301 static noinline void __init check_lower_bound_split(struct maple_tree *mt)
303 MT_BUG_ON(mt, !mtree_empty(mt));
304 check_lb_not_empty(mt);
/*
 * Upper-bound split test: insert indices growing up toward ~4e9 and verify
 * each stored index loads back while its neighbours stay empty.
 */
307 static noinline void __init check_upper_bound_split(struct maple_tree *mt)
312 MT_BUG_ON(mt, !mtree_empty(mt));
317 huge = 4000UL * 1000 * 1000;
321 check_insert(mt, i, (void *) i);
322 for (j = i; j >= huge; j *= 2) {
323 check_load(mt, j-1, NULL);
324 check_load(mt, j, (void *) j);
325 check_load(mt, j+1, NULL);
/*
 * Mid-range split test: pin entries at both extremes (0 and ~8e9) before
 * running the lower-bound fill, forcing splits in the middle of the range.
 */
332 static noinline void __init check_mid_split(struct maple_tree *mt)
334 unsigned long huge = 8000UL * 1000 * 1000;
336 check_insert(mt, huge, (void *) huge);
337 check_insert(mt, 0, xa_mk_value(0));
338 check_lb_not_empty(mt);
/*
 * Test mas_find_rev(): populate ranges [i*10, i*10+5] for i = 0..200, then
 * walk backwards from various start points and assert both the entries found
 * and NULL once the limit is crossed or index 0 is exhausted.
 */
341 static noinline void __init check_rev_find(struct maple_tree *mt)
343 int i, nr_entries = 200;
345 MA_STATE(mas, mt, 0, 0);
347 for (i = 0; i <= nr_entries; i++)
348 mtree_store_range(mt, i*10, i*10 + 5,
349 xa_mk_value(i), GFP_KERNEL);
353 val = mas_find_rev(&mas, 1000);
354 MT_BUG_ON(mt, val != xa_mk_value(100));
355 val = mas_find_rev(&mas, 1000);
356 MT_BUG_ON(mt, val != NULL);
/* Limit 997 lies in the gap below entry 100's range start. */
359 val = mas_find_rev(&mas, 997);
360 MT_BUG_ON(mt, val != NULL);
363 val = mas_find_rev(&mas, 900);
364 MT_BUG_ON(mt, val != xa_mk_value(100));
365 val = mas_find_rev(&mas, 900);
366 MT_BUG_ON(mt, val != xa_mk_value(99));
/* Walk all the way down to entry 0, then NULL. */
369 val = mas_find_rev(&mas, 0);
370 MT_BUG_ON(mt, val != xa_mk_value(2));
371 val = mas_find_rev(&mas, 0);
372 MT_BUG_ON(mt, val != xa_mk_value(1));
373 val = mas_find_rev(&mas, 0);
374 MT_BUG_ON(mt, val != xa_mk_value(0));
375 val = mas_find_rev(&mas, 0);
376 MT_BUG_ON(mt, val != NULL);
/*
 * Broad mas_find()/mas_next()/mas_prev()/mt_for_each() coverage: builds a
 * tree mixing canonical values and XA_ZERO_ENTRY markers, then walks it
 * forward and backward asserting every entry, including the ULONG_MAX edge
 * cases.  Many loop bodies and intermediate lines are missing from this
 * extraction.
 */
380 static noinline void __init check_find(struct maple_tree *mt)
382 unsigned long val = 0;
386 unsigned long last = 0, index = 0;
387 void *entry, *entry2;
389 MA_STATE(mas, mt, 0, 0);
392 MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));
394 #if defined(CONFIG_64BIT)
395 top = 4398046511104UL;
406 for (int i = 0; i <= count; i++) {
408 MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
410 MT_BUG_ON(mt, mtree_insert(mt, val,
411 XA_ZERO_ENTRY, GFP_KERNEL));
419 while ((entry = mas_find(&mas, 268435456)) != NULL) {
421 MT_BUG_ON(mt, xa_mk_value(val) != entry);
423 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
426 /* For zero check. */
435 mas_for_each(&mas, entry, ULONG_MAX) {
437 MT_BUG_ON(mt, xa_mk_value(val) != entry);
439 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
441 /* For zero check. */
451 mas_for_each(&mas, entry, ULONG_MAX) {
453 MT_BUG_ON(mt, xa_mk_value(val) != entry);
455 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
457 /* For zero check. */
468 max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
469 mt_for_each(mt, entry, index, max) {
470 MT_BUG_ON(mt, xa_mk_value(val) != entry);
472 if (val == 64) /* Skip zero entry. */
474 /* For zero check. */
482 MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
483 mt_for_each(mt, entry, index, ULONG_MAX) {
485 MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
487 MT_BUG_ON(mt, xa_mk_value(val) != entry);
489 /* Workaround for 32bit */
490 if ((val << 2) < val)
495 if (val == 64) /* Skip zero entry. */
497 /* For zero check. */
501 MT_BUG_ON(mt, max > 25);
503 mtree_erase_index(mt, ULONG_MAX);
/* mt_find() hit and miss cases. */
507 entry = mt_find(mt, &index, 512);
508 MT_BUG_ON(mt, xa_mk_value(256) != entry);
512 entry = mt_find(mt, &index, 20);
513 MT_BUG_ON(mt, entry != NULL);
517 /* Insert ULONG_MAX */
518 MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
523 mas_for_each(&mas, entry, ULONG_MAX) {
525 MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
527 MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
529 MT_BUG_ON(mt, xa_mk_value(val) != entry);
531 /* Workaround for 32bit */
532 if ((val << 2) < val)
537 /* For zero check. */
546 mas_set(&mas, 1048576);
548 entry = mas_find(&mas, 1048576);
550 MT_BUG_ON(mas.tree, entry == NULL);
/*
 * Last-entry check via mas_prev():
 */
554 * 1. get the expected value, leveraging the existence of an end entry
555 * 2. delete end entry
556 * 3. find the last value but searching for ULONG_MAX and then using
559 /* First, get the expected result. */
562 mas.index = ULONG_MAX; /* start at max.. */
563 entry = mas_find(&mas, ULONG_MAX);
564 entry = mas_prev(&mas, 0);
568 /* Erase the last entry. */
570 mas.index = ULONG_MAX;
571 mas.last = ULONG_MAX;
574 /* Get the previous value from MAS_START */
576 entry2 = mas_prev(&mas, 0);
/* The two approaches must agree on entry, index, and last. */
579 MT_BUG_ON(mt, entry != entry2);
580 MT_BUG_ON(mt, index != mas.index);
581 MT_BUG_ON(mt, last != mas.last);
585 mas.index = ULONG_MAX;
586 mas.last = ULONG_MAX;
587 entry2 = mas_prev(&mas, 0);
588 MT_BUG_ON(mt, entry != entry2);
591 MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);
/*
 * mas_for_each() on an empty tree must yield nothing; then verify iteration
 * counts while inserting 0..255 and again while erasing them (zero entries
 * skipped).
 */
597 static noinline void __init check_find_2(struct maple_tree *mt)
602 MA_STATE(mas, mt, 0, 0);
604 mas_for_each(&mas, entry, ULONG_MAX)
608 for (i = 0; i < 256; i++) {
609 mtree_insert_index(mt, i, GFP_KERNEL);
613 mas_for_each(&mas, entry, ULONG_MAX) {
614 MT_BUG_ON(mt, entry != xa_mk_value(j));
618 MT_BUG_ON(mt, j != i + 1);
621 for (i = 0; i < 256; i++) {
622 mtree_erase_index(mt, i);
626 mas_for_each(&mas, entry, ULONG_MAX) {
627 if (xa_is_zero(entry))
630 MT_BUG_ON(mt, entry != xa_mk_value(j));
634 MT_BUG_ON(mt, j != 256);
637 /*MT_BUG_ON(mt, !mtree_empty(mt)); */
641 #if defined(CONFIG_64BIT)
/*
 * Reverse (top-down) allocation tests using a snapshot of a real process
 * memory map: insert the mapped ranges, verify mas_empty_area_rev() finds
 * each known hole, then drive check_mtree_alloc_rrange() through the
 * req_range request table (start, end, size, expected address, expected
 * return — five values per request).
 */
642 static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
646 * cat /proc/self/maps | awk '{print $1}'|
647 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
650 static const unsigned long range[] = {
651 /* Inclusive , Exclusive. */
652 0x565234af2000, 0x565234af4000,
653 0x565234af4000, 0x565234af9000,
654 0x565234af9000, 0x565234afb000,
655 0x565234afc000, 0x565234afd000,
656 0x565234afd000, 0x565234afe000,
657 0x565235def000, 0x565235e10000,
658 0x7f36d4bfd000, 0x7f36d4ee2000,
659 0x7f36d4ee2000, 0x7f36d4f04000,
660 0x7f36d4f04000, 0x7f36d504c000,
661 0x7f36d504c000, 0x7f36d5098000,
662 0x7f36d5098000, 0x7f36d5099000,
663 0x7f36d5099000, 0x7f36d509d000,
664 0x7f36d509d000, 0x7f36d509f000,
665 0x7f36d509f000, 0x7f36d50a5000,
666 0x7f36d50b9000, 0x7f36d50db000,
667 0x7f36d50db000, 0x7f36d50dc000,
668 0x7f36d50dc000, 0x7f36d50fa000,
669 0x7f36d50fa000, 0x7f36d5102000,
670 0x7f36d5102000, 0x7f36d5103000,
671 0x7f36d5103000, 0x7f36d5104000,
672 0x7f36d5104000, 0x7f36d5105000,
673 0x7fff5876b000, 0x7fff5878d000,
674 0x7fff5878e000, 0x7fff58791000,
675 0x7fff58791000, 0x7fff58793000,
678 static const unsigned long holes[] = {
680 * Note: start of hole is INCLUSIVE
681 * end of hole is EXCLUSIVE
682 * (opposite of the above table.)
683 * Start of hole, end of hole, size of hole (+1)
685 0x565234afb000, 0x565234afc000, 0x1000,
686 0x565234afe000, 0x565235def000, 0x12F1000,
687 0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
691 * req_range consists of 4 values.
695 * 4. number that should be returned.
698 static const unsigned long req_range[] = {
699 0x565234af9000, /* Min */
700 0x7fff58791000, /* Max */
702 0x7fff5878d << 12, /* First rev hole of size 0x1000 */
703 0, /* Return value success. */
706 0x565234AF0 << 12, /* Max */
708 0x565234AEE << 12, /* max - 3. */
709 0, /* Return value success. */
714 562949953421311 << 12,/* First rev hole of size 0x1000 */
715 0, /* Return value success. */
718 0x7F36D5109 << 12, /* Max */
720 0x7F36D5106 << 12, /* First rev hole of size 0x4000 */
721 0, /* Return value success. */
732 18446744073709551615UL,
733 562915594369134UL << 12,
737 /* Single space test. */
745 int i, range_count = ARRAY_SIZE(range);
746 int req_range_count = ARRAY_SIZE(req_range);
747 unsigned long min = 0;
749 MA_STATE(mas, mt, 0, 0);
/* Block everything above MTREE_ALLOC_MAX with a zero entry. */
751 mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
753 #define DEBUG_REV_RANGE 0
754 for (i = 0; i < range_count; i += 2) {
755 /* Inclusive, Inclusive (with the -1) */
758 pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
759 (range[i + 1] >> 12) - 1);
761 check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
762 xa_mk_value(range[i] >> 12), 0);
/* Verify each known hole is found from the high end. */
768 for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
770 pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
771 min, holes[i+1]>>12, holes[i+2]>>12,
774 MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
778 pr_debug("Found %lu %lu\n", mas.index, mas.last);
779 pr_debug("gap %lu %lu\n", (holes[i] >> 12),
782 MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
783 MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
784 min = holes[i+1] >> 12;
/* Drive the request table through check_mtree_alloc_rrange(). */
789 for (i = 0; i < req_range_count; i += 5) {
791 pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
792 i, req_range[i] >> 12,
793 (req_range[i + 1] >> 12),
794 req_range[i+2] >> 12,
795 req_range[i+3] >> 12);
797 check_mtree_alloc_rrange(mt,
798 req_range[i] >> 12, /* start */
799 req_range[i+1] >> 12, /* end */
800 req_range[i+2] >> 12, /* size */
801 req_range[i+3] >> 12, /* expected address */
802 req_range[i+4], /* expected return */
803 xa_mk_value(req_range[i] >> 12)); /* pointer */
807 mt_set_non_kernel(1);
808 mtree_erase(mt, 34148798727); /* create a deleted range. */
809 mtree_erase(mt, 34148798725);
810 check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
/*
 * Forward (bottom-up) allocation tests using the same memory-map snapshot:
 * insert the mapped ranges, verify mas_empty_area() finds each known hole,
 * then drive check_mtree_alloc_range() through the req_range request table
 * (five values per request, as in check_alloc_rev_range()).
 */
816 static noinline void __init check_alloc_range(struct maple_tree *mt)
820 * cat /proc/self/maps|awk '{print $1}'|
821 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
824 static const unsigned long range[] = {
825 /* Inclusive , Exclusive. */
826 0x565234af2000, 0x565234af4000,
827 0x565234af4000, 0x565234af9000,
828 0x565234af9000, 0x565234afb000,
829 0x565234afc000, 0x565234afd000,
830 0x565234afd000, 0x565234afe000,
831 0x565235def000, 0x565235e10000,
832 0x7f36d4bfd000, 0x7f36d4ee2000,
833 0x7f36d4ee2000, 0x7f36d4f04000,
834 0x7f36d4f04000, 0x7f36d504c000,
835 0x7f36d504c000, 0x7f36d5098000,
836 0x7f36d5098000, 0x7f36d5099000,
837 0x7f36d5099000, 0x7f36d509d000,
838 0x7f36d509d000, 0x7f36d509f000,
839 0x7f36d509f000, 0x7f36d50a5000,
840 0x7f36d50b9000, 0x7f36d50db000,
841 0x7f36d50db000, 0x7f36d50dc000,
842 0x7f36d50dc000, 0x7f36d50fa000,
843 0x7f36d50fa000, 0x7f36d5102000,
844 0x7f36d5102000, 0x7f36d5103000,
845 0x7f36d5103000, 0x7f36d5104000,
846 0x7f36d5104000, 0x7f36d5105000,
847 0x7fff5876b000, 0x7fff5878d000,
848 0x7fff5878e000, 0x7fff58791000,
849 0x7fff58791000, 0x7fff58793000,
851 static const unsigned long holes[] = {
852 /* Start of hole, end of hole, size of hole (+1) */
853 0x565234afb000, 0x565234afc000, 0x1000,
854 0x565234afe000, 0x565235def000, 0x12F1000,
855 0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
859 * req_range consists of 4 values.
863 * 4. number that should be returned.
866 static const unsigned long req_range[] = {
867 0x565234af9000, /* Min */
868 0x7fff58791000, /* Max */
870 0x565234afb000, /* First hole in our data of size 1000. */
871 0, /* Return value success. */
874 0x7fff58791000, /* Max */
876 0x0, /* First hole in our data of size 2000. */
877 0, /* Return value success. */
880 34148797436 << 12, /* Min */
881 0x7fff587AF000, /* Max */
883 34148798629 << 12, /* Expected location */
884 0, /* Return value success. */
887 34148798623 << 12, /* Min */
888 34148798683 << 12, /* Max */
890 0, /* Expected location */
891 -EBUSY, /* Return value failed. */
893 /* Test filling entire gap. */
894 34148798623 << 12, /* Min */
895 0x7fff587AF000, /* Max */
897 34148798632 << 12, /* Expected location */
898 0, /* Return value success. */
900 /* Test walking off the end of root. */
904 0, /* Expected location */
905 -EBUSY, /* Return value failure. */
907 /* Test looking for too large a hole across entire range. */
910 4503599618982063UL << 12, /* Size */
911 34359052178 << 12, /* Expected location */
912 -EBUSY, /* Return failure. */
914 /* Test a single entry */
915 34148798648 << 12, /* Min */
916 34148798648 << 12, /* Max */
917 4096, /* Size of 1 */
918 34148798648 << 12, /* Location is the same as min/max */
921 int i, range_count = ARRAY_SIZE(range);
922 int req_range_count = ARRAY_SIZE(req_range);
923 unsigned long min = 0x565234af2000;
924 MA_STATE(mas, mt, 0, 0);
/* Block everything above MTREE_ALLOC_MAX with a zero entry. */
926 mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
928 for (i = 0; i < range_count; i += 2) {
929 #define DEBUG_ALLOC_RANGE 0
930 #if DEBUG_ALLOC_RANGE
931 pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
932 (range[i + 1] >> 12) - 1);
933 mt_dump(mt, mt_dump_hex);
935 check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
936 xa_mk_value(range[i] >> 12), 0);
/* Verify each known hole is found from the low end. */
943 for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
945 #if DEBUG_ALLOC_RANGE
946 pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
947 holes[i+1] >> 12, holes[i+2] >> 12,
950 MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
953 MT_BUG_ON(mt, mas.index != holes[i] >> 12);
/* Drive the request table through check_mtree_alloc_range(). */
958 for (i = 0; i < req_range_count; i += 5) {
959 #if DEBUG_ALLOC_RANGE
960 pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
961 i/5, req_range[i] >> 12, req_range[i + 1] >> 12,
962 req_range[i + 2] >> 12, req_range[i + 3] >> 12,
963 req_range[i], req_range[i+1]);
965 check_mtree_alloc_range(mt,
966 req_range[i] >> 12, /* start */
967 req_range[i+1] >> 12, /* end */
968 req_range[i+2] >> 12, /* size */
969 req_range[i+3] >> 12, /* expected address */
970 req_range[i+4], /* expected return */
971 xa_mk_value(req_range[i] >> 12)); /* pointer */
973 #if DEBUG_ALLOC_RANGE
974 mt_dump(mt, mt_dump_hex)
/*
 * Range store/overwrite stress tests: overlapping inserts, multi-level
 * overwrites, node-boundary rewrites, in-place appends/expansions, rebalance
 * gaps, and 3-child splits all the way to the root.  Many loop bodies and
 * val/val2 setup lines are missing from this extraction.
 */
982 static noinline void __init check_ranges(struct maple_tree *mt)
985 static const unsigned long r[] = {
988 17, 22, /* Overlaps previous range. */
995 MT_BUG_ON(mt, !mtree_empty(mt));
996 check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
997 check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
/* Overlapping insert must fail; overlapping store must succeed. */
998 check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
999 MT_BUG_ON(mt, !mt_height(mt));
1001 check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
1002 check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
1003 check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
1004 MT_BUG_ON(mt, !mt_height(mt));
1006 MT_BUG_ON(mt, mt_height(mt));
1008 check_seq(mt, 50, false);
1009 mt_set_non_kernel(4);
1010 check_store_range(mt, 5, 47, xa_mk_value(47), 0);
1011 MT_BUG_ON(mt, !mt_height(mt));
1014 /* Create tree of 1-100 */
1015 check_seq(mt, 100, false);
1017 mt_set_non_kernel(10);
1018 check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1019 MT_BUG_ON(mt, !mt_height(mt));
1022 /* Create tree of 1-200 */
1023 check_seq(mt, 200, false);
1025 check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1026 MT_BUG_ON(mt, !mt_height(mt));
1029 check_seq(mt, 30, false);
1030 check_store_range(mt, 6, 18, xa_mk_value(6), 0);
1031 MT_BUG_ON(mt, !mt_height(mt));
1034 /* Overwrite across multiple levels. */
1035 /* Create tree of 1-400 */
1036 check_seq(mt, 400, false);
1037 mt_set_non_kernel(50);
1039 check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
1040 mt_set_non_kernel(50);
1041 mtree_test_erase(mt, 140);
1042 mtree_test_erase(mt, 141);
1043 mtree_test_erase(mt, 142);
1044 mtree_test_erase(mt, 143);
1045 mtree_test_erase(mt, 130);
1046 mtree_test_erase(mt, 131);
1047 mtree_test_erase(mt, 132);
1048 mtree_test_erase(mt, 133);
1049 mtree_test_erase(mt, 134);
1050 mtree_test_erase(mt, 135);
1051 check_load(mt, r[12], xa_mk_value(r[12]));
1052 check_load(mt, r[13], xa_mk_value(r[12]));
1053 check_load(mt, r[13] - 1, xa_mk_value(r[12]));
1054 check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
1055 check_load(mt, 135, NULL);
1056 check_load(mt, 140, NULL);
1057 mt_set_non_kernel(0);
1058 MT_BUG_ON(mt, !mt_height(mt));
1063 /* Overwrite multiple levels at the end of the tree (slot 7) */
1064 mt_set_non_kernel(50);
1065 check_seq(mt, 400, false);
1066 check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1067 check_store_range(mt, 347, 352, xa_mk_value(347), 0);
1069 check_load(mt, 346, xa_mk_value(346));
1070 for (i = 347; i <= 352; i++)
1071 check_load(mt, i, xa_mk_value(347));
1072 for (i = 353; i <= 361; i++)
1073 check_load(mt, i, xa_mk_value(353));
1074 check_load(mt, 362, xa_mk_value(362));
1075 mt_set_non_kernel(0);
1076 MT_BUG_ON(mt, !mt_height(mt));
1079 mt_set_non_kernel(50);
1080 check_seq(mt, 400, false);
1081 check_store_range(mt, 352, 364, NULL, 0);
1082 check_store_range(mt, 351, 363, xa_mk_value(352), 0);
1083 check_load(mt, 350, xa_mk_value(350));
1084 check_load(mt, 351, xa_mk_value(352));
1085 for (i = 352; i <= 363; i++)
1086 check_load(mt, i, xa_mk_value(352));
1087 check_load(mt, 364, NULL);
1088 check_load(mt, 365, xa_mk_value(365));
1089 mt_set_non_kernel(0);
1090 MT_BUG_ON(mt, !mt_height(mt));
1093 mt_set_non_kernel(5);
1094 check_seq(mt, 400, false);
1095 check_store_range(mt, 352, 364, NULL, 0);
1096 check_store_range(mt, 351, 364, xa_mk_value(352), 0);
1097 check_load(mt, 350, xa_mk_value(350));
1098 check_load(mt, 351, xa_mk_value(352));
1099 for (i = 352; i <= 364; i++)
1100 check_load(mt, i, xa_mk_value(352));
1101 check_load(mt, 365, xa_mk_value(365));
1102 mt_set_non_kernel(0);
1103 MT_BUG_ON(mt, !mt_height(mt));
1107 mt_set_non_kernel(50);
1108 check_seq(mt, 400, false);
1109 check_store_range(mt, 362, 367, xa_mk_value(362), 0);
1110 check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1111 mt_set_non_kernel(0);
1113 MT_BUG_ON(mt, !mt_height(mt));
1116 * Interesting cases:
1117 * 1. Overwrite the end of a node and end in the first entry of the next
1119 * 2. Split a single range
1120 * 3. Overwrite the start of a range
1121 * 4. Overwrite the end of a range
1122 * 5. Overwrite the entire range
1123 * 6. Overwrite a range that causes multiple parent nodes to be
1125 * 7. Overwrite a range that causes multiple parent nodes and part of
1126 * root to be combined
1127 * 8. Overwrite the whole tree
1128 * 9. Try to overwrite the zero entry of an alloc tree.
1129 * 10. Write a range larger than a nodes current pivot
1132 mt_set_non_kernel(50);
1133 for (i = 0; i <= 500; i++) {
1136 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1138 check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
1139 check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
1140 check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
1141 check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
1142 check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
1144 mt_set_non_kernel(0);
1146 mt_set_non_kernel(50);
1147 for (i = 0; i <= 500; i++) {
1150 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1152 check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
1153 check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
1154 check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
1155 check_store_range(mt, 2460, 2470, NULL, 0);
1156 check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
1157 check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
1158 mt_set_non_kernel(0);
1159 MT_BUG_ON(mt, !mt_height(mt));
1162 /* Check in-place modifications */
1163 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1164 /* Append to the start of last range */
1165 mt_set_non_kernel(50);
1166 for (i = 0; i <= 500; i++) {
1169 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1172 /* Append to the last range without touching any boundaries */
1173 for (i = 0; i < 10; i++) {
1176 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1179 /* Append to the end of last range */
1181 for (i = 0; i < 10; i++) {
1183 MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
1184 xa_mk_value(val)) != 0);
1187 /* Overwriting the range and over a part of the next range */
1188 for (i = 10; i < 30; i += 2) {
1191 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1194 /* Overwriting a part of the range and over the next range */
1195 for (i = 50; i < 70; i += 2) {
1198 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1202 * Expand the range, only partially overwriting the previous and
1205 for (i = 100; i < 130; i += 3) {
1208 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1212 * Expand the range, only partially overwriting the previous and
1213 * next ranges, in RCU mode
1216 for (i = 150; i < 180; i += 3) {
1219 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1222 MT_BUG_ON(mt, !mt_height(mt));
1224 mt_set_non_kernel(0);
1227 /* Test rebalance gaps */
1228 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1229 mt_set_non_kernel(50);
1230 for (i = 0; i <= 50; i++) {
1233 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1235 check_store_range(mt, 161, 161, xa_mk_value(161), 0);
1236 check_store_range(mt, 162, 162, xa_mk_value(162), 0);
1237 check_store_range(mt, 163, 163, xa_mk_value(163), 0);
1238 check_store_range(mt, 240, 249, NULL, 0);
1239 mtree_erase(mt, 200);
1240 mtree_erase(mt, 210);
1241 mtree_erase(mt, 220);
1242 mtree_erase(mt, 230);
1243 mt_set_non_kernel(0);
1244 MT_BUG_ON(mt, !mt_height(mt));
1247 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1248 for (i = 0; i <= 500; i++) {
1251 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1253 check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
1255 MT_BUG_ON(mt, !mt_height(mt));
1258 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1259 for (i = 0; i <= 500; i++) {
1262 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1264 check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
1265 check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
1266 check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
1267 check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
1268 check_store_range(mt, 4842, 4849, NULL, 0);
1270 MT_BUG_ON(mt, !mt_height(mt));
1273 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1274 for (i = 0; i <= 1300; i++) {
1277 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1278 MT_BUG_ON(mt, mt_height(mt) >= 4);
1280 /* Cause a 3 child split all the way up the tree. */
1281 for (i = 5; i < 215; i += 10)
1282 check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
1283 for (i = 5; i < 65; i += 10)
1284 check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);
1286 MT_BUG_ON(mt, mt_height(mt) >= 4);
1287 for (i = 5; i < 45; i += 10)
1288 check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
1290 MT_BUG_ON(mt, mt_height(mt) < 4);
1294 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1295 for (i = 0; i <= 1200; i++) {
1298 check_store_range(mt, val, val2, xa_mk_value(val), 0);
1299 MT_BUG_ON(mt, mt_height(mt) >= 4);
1301 /* Fill parents and leaves before split. */
1302 for (i = 5; i < 455; i += 10)
1303 check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);
1305 for (i = 1; i < 16; i++)
1306 check_store_range(mt, 8185 + i, 8185 + i + 1,
1307 xa_mk_value(8185+i), 0);
1308 MT_BUG_ON(mt, mt_height(mt) >= 4);
1309 /* triple split across multiple levels. */
1310 check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
1312 MT_BUG_ON(mt, mt_height(mt) != 4);
/*
 * Test mas_next(): seed 0..limit, walk to index 0, then step forward and
 * assert each successive entry, and NULL once past the limit.
 */
1315 static noinline void __init check_next_entry(struct maple_tree *mt)
1318 unsigned long limit = 30, i = 0;
1319 MA_STATE(mas, mt, i, i);
1321 MT_BUG_ON(mt, !mtree_empty(mt));
1323 check_seq(mt, limit, false);
1326 /* Check the first one and get ma_state in the correct state. */
1327 MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
1328 for ( ; i <= limit + 1; i++) {
1329 entry = mas_next(&mas, limit);
1331 MT_BUG_ON(mt, entry != NULL);
1333 MT_BUG_ON(mt, xa_mk_value(i) != entry);
/*
 * Test mas_prev(): step backwards through a sequential tree, then rebuild
 * with sparse ranges (i*10 .. i*10+5) and assert mas_prev() honours its
 * minimum-index limit by returning NULL inside gaps.
 */
1339 static noinline void __init check_prev_entry(struct maple_tree *mt)
1341 unsigned long index = 16;
1345 MA_STATE(mas, mt, index, index);
1347 MT_BUG_ON(mt, !mtree_empty(mt));
1348 check_seq(mt, 30, false);
1351 value = mas_find(&mas, ULONG_MAX);
1352 MT_BUG_ON(mt, value != xa_mk_value(index));
1353 value = mas_prev(&mas, 0);
1354 MT_BUG_ON(mt, value != xa_mk_value(index - 1));
1358 /* Check limits on prev */
1359 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1361 for (i = 0; i <= index; i++) {
1362 mas_set_range(&mas, i*10, i*10+5);
1363 mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
1367 value = mas_walk(&mas);
1368 MT_BUG_ON(mt, value != xa_mk_value(2));
/* Limit 19 is inside the gap below range 2: prev must find nothing. */
1370 value = mas_prev(&mas, 19);
1371 MT_BUG_ON(mt, value != NULL);
1374 value = mas_walk(&mas);
1375 MT_BUG_ON(mt, value != xa_mk_value(8));
1377 value = mas_prev(&mas, 76);
1378 MT_BUG_ON(mt, value != NULL);
/*
 * Test expanding a root-only tree: walk/store at the root level and verify
 * index/last tracking, including tagged pointer values (low bits 1 and 2 set
 * on a function address) surviving next/prev traversal intact.
 */
1383 static noinline void __init check_root_expand(struct maple_tree *mt)
1385 MA_STATE(mas, mt, 0, 0);
/* Empty tree: a walk at 0 spans the whole range with NULL. */
1391 ptr = mas_walk(&mas);
1392 MT_BUG_ON(mt, mas.index != 0);
1393 MT_BUG_ON(mt, ptr != NULL);
1394 MT_BUG_ON(mt, mas.index != 0);
1395 MT_BUG_ON(mt, mas.last != ULONG_MAX);
/* A function address is used as an arbitrary non-value pointer. */
1397 ptr = &check_prev_entry;
1399 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1402 ptr = mas_walk(&mas);
1403 MT_BUG_ON(mt, ptr != NULL);
1406 ptr = mas_walk(&mas);
1407 MT_BUG_ON(mt, ptr != &check_prev_entry);
1410 ptr = mas_walk(&mas);
1411 MT_BUG_ON(mt, ptr != NULL);
1416 mt_init_flags(mt, 0);
1420 ptr = &check_prev_entry;
1421 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1424 ptr = mas_walk(&mas);
1425 MT_BUG_ON(mt, ptr != NULL);
1426 MT_BUG_ON(mt, mas.index != 1);
1427 MT_BUG_ON(mt, mas.last != ULONG_MAX);
1429 mas_set_range(&mas, 0, 100);
1430 ptr = mas_walk(&mas);
1431 MT_BUG_ON(mt, ptr != &check_prev_entry);
1432 MT_BUG_ON(mt, mas.last != 0);
1436 mt_init_flags(mt, 0);
/* Pointer with low bit set must round-trip unchanged. */
1440 ptr = (void *)((unsigned long) check_prev_entry | 1UL);
1441 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1442 ptr = mas_next(&mas, ULONG_MAX);
1443 MT_BUG_ON(mt, ptr != NULL);
1444 MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));
1447 ptr = mas_prev(&mas, 0);
1448 MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
1449 MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));
1455 mt_init_flags(mt, 0);
/* Pointer with bit 1 set must also round-trip unchanged. */
1458 ptr = (void *)((unsigned long) check_prev_entry | 2UL);
1459 mas_store_gfp(&mas, ptr, GFP_KERNEL);
1460 ptr = mas_next(&mas, ULONG_MAX);
1461 MT_BUG_ON(mt, ptr != NULL);
1462 MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));
1465 ptr = mas_prev(&mas, 0);
1466 MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
1467 MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));
/*
 * check_gap_combining() - verify that erasing entries merges adjacent
 * gaps and that mas_empty_area_rev() finds the combined gap.  Uses three
 * index sequences: seq100 (singletons in a small tree; 64-bit and 32-bit
 * variants selected via the seq100 pointer), seq2000 (multi-level tree
 * gaps) and seq400 (rebalancing over two levels).
 */
1473 static noinline void __init check_gap_combining(struct maple_tree *mt)
1475 struct maple_enode *mn1, *mn2;
1477 unsigned long singletons = 100;
1478 static const unsigned long *seq100;
1479 static const unsigned long seq100_64[] = {
1493 static const unsigned long seq100_32[] = {
1507 static const unsigned long seq2000[] = {
1511 static const unsigned long seq400[] = {
1513 256, 260, 266, 270, 275, 280, 290, 398,
1517 unsigned long index;
1519 MA_STATE(mas, mt, 0, 0);
1527 mas_set(&mas, index);
1528 MT_BUG_ON(mt, !mtree_empty(mt));
1529 check_seq(mt, singletons, false); /* create 100 singletons. */
/* Punch two holes, confirming the loads now return NULL. */
1531 mt_set_non_kernel(1);
1532 mtree_test_erase(mt, seq100[2]);
1533 check_load(mt, seq100[2], NULL);
1534 mtree_test_erase(mt, seq100[1]);
1535 check_load(mt, seq100[1], NULL);
1538 entry = mas_find(&mas, ULONG_MAX);
1539 MT_BUG_ON(mt, entry != xa_mk_value(index));
1541 mas_next(&mas, ULONG_MAX);
1542 entry = mas_next(&mas, ULONG_MAX);
1543 MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
1545 MT_BUG_ON(mt, mn1 == mn2); /* test the test. */
/*
1548 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
1549 * seq100[4]. Search for the gap.
 */
1551 mt_set_non_kernel(1);
1553 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
1555 MT_BUG_ON(mt, mas.index != index + 1);
/* Erase three consecutive singletons to create a gap of 3. */
1558 mtree_test_erase(mt, seq100[6]);
1559 check_load(mt, seq100[6], NULL);
1560 mtree_test_erase(mt, seq100[7]);
1561 check_load(mt, seq100[7], NULL);
1562 mtree_test_erase(mt, seq100[8]);
1569 entry = mas_find(&mas, ULONG_MAX);
1570 MT_BUG_ON(mt, entry != xa_mk_value(index));
1572 entry = mas_next(&mas, ULONG_MAX);
1573 MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
1574 mas_next(&mas, ULONG_MAX); /* go to the next entry. */
1576 MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */
/*
1579 * At this point, there is a gap of 3 at seq100[6]. Find it by
1580 * searching 20 - 50 for size 3.
 */
1583 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
1585 MT_BUG_ON(mt, mas.index != seq100[6]);
/* Clear two adjacent entries via NULL stores, then find that gap. */
1588 mt_set_non_kernel(1);
1589 mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
1590 check_load(mt, seq100[13], NULL);
1591 check_load(mt, seq100[14], xa_mk_value(seq100[14]));
1592 mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
1593 check_load(mt, seq100[13], NULL);
1594 check_load(mt, seq100[14], NULL);
1598 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
1600 MT_BUG_ON(mt, mas.index != seq100[13]);
/*
1605 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
 */
1608 mt_set_non_kernel(2);
1609 mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
1610 mtree_test_erase(mt, seq100[15]);
1613 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
1616 MT_BUG_ON(mt, mas.index != seq100[18]);
1620 /* seq 2000 tests are for multi-level tree gaps */
1621 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1622 check_seq(mt, 2000, false);
1623 mt_set_non_kernel(1);
1624 mtree_test_erase(mt, seq2000[0]);
1625 mtree_test_erase(mt, seq2000[1]);
1627 mt_set_non_kernel(2);
1630 MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
1632 MT_BUG_ON(mt, mas.index != seq2000[1]);
1637 /* seq 400 tests rebalancing over two levels. */
1638 mt_set_non_kernel(99);
1639 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1640 check_seq(mt, 400, false);
1641 mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
1642 mt_set_non_kernel(0);
/* Rebuild and stack overlapping range stores ending at seq400[9]. */
1645 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1646 check_seq(mt, 400, false);
1647 mt_set_non_kernel(50);
1648 mtree_test_store_range(mt, seq400[2], seq400[9],
1649 xa_mk_value(seq400[2]));
1650 mtree_test_store_range(mt, seq400[3], seq400[9],
1651 xa_mk_value(seq400[3]));
1652 mtree_test_store_range(mt, seq400[4], seq400[9],
1653 xa_mk_value(seq400[4]));
1654 mtree_test_store_range(mt, seq400[5], seq400[9],
1655 xa_mk_value(seq400[5]));
1656 mtree_test_store_range(mt, seq400[0], seq400[9],
1657 xa_mk_value(seq400[0]));
1658 mtree_test_store_range(mt, seq400[6], seq400[9],
1659 xa_mk_value(seq400[6]));
1660 mtree_test_store_range(mt, seq400[7], seq400[9],
1661 xa_mk_value(seq400[7]));
1662 mtree_test_store_range(mt, seq400[8], seq400[9],
1663 xa_mk_value(seq400[8]));
1664 mtree_test_store_range(mt, seq400[10], seq400[11],
1665 xa_mk_value(seq400[10]));
1667 mt_set_non_kernel(0);
/*
 * check_node_overwrite() - fill the tree with 50-wide ranges at every
 * multiple of 100, then overwrite a large span with NULL to force the
 * node-overwrite path.
 */
1670 static noinline void __init check_node_overwrite(struct maple_tree *mt)
1674 for (i = 0; i < max; i++)
1675 mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));
/* Wipe out a multi-node span in one store. */
1677 mtree_test_store_range(mt, 319951, 367950, NULL);
1678 /*mt_dump(mt, mt_dump_dec); */
1682 #if defined(BENCH_SLOT_STORE)
/*
 * bench_slot_store() - benchmark repeatedly erasing a single index and
 * re-storing the covering range (slot-store fast path).
 */
1683 static noinline void __init bench_slot_store(struct maple_tree *mt)
1685 int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;
/* Populate ranges [i, i+5] every 10 indices. */
1687 for (i = 0; i < max; i += 10)
1688 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1690 for (i = 0; i < count; i++) {
1691 mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
1692 mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
1698 #if defined(BENCH_NODE_STORE)
/*
 * bench_node_store() - benchmark repeated overwrites of a 16-wide range
 * at a sliding offset (node-store path).
 */
1699 static noinline void __init bench_node_store(struct maple_tree *mt)
1701 int i, overwrite = 76, max = 240, count = 20000000;
1703 for (i = 0; i < max; i += 10)
1704 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1706 for (i = 0; i < count; i++) {
1707 mtree_store_range(mt, overwrite, overwrite + 15,
1708 xa_mk_value(overwrite), GFP_KERNEL);
/* Wrap the overwrite offset once it passes 135. */
1711 if (overwrite >= 135)
1717 #if defined(BENCH_AWALK)
/*
 * bench_awalk() - benchmark allocation walks: repeatedly search in
 * reverse for a 10-wide empty area in 0..2000 after punching a hole at
 * 1470-1475.
 */
1718 static noinline void __init bench_awalk(struct maple_tree *mt)
1720 int i, max = 2500, count = 50000000;
1721 MA_STATE(mas, mt, 1470, 1470);
1723 for (i = 0; i < max; i += 10)
1724 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1726 mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);
1728 for (i = 0; i < count; i++) {
1729 mas_empty_area_rev(&mas, 0, 2000, 10);
1734 #if defined(BENCH_WALK)
/*
 * bench_walk() - benchmark plain tree walks from a fixed index.
 */
1735 static noinline void __init bench_walk(struct maple_tree *mt)
1737 int i, max = 2500, count = 550000000;
1738 MA_STATE(mas, mt, 1470, 1470);
1740 for (i = 0; i < max; i += 10)
1741 mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1743 for (i = 0; i < count; i++) {
1751 #if defined(BENCH_MT_FOR_EACH)
/*
 * bench_mt_for_each() - benchmark full-tree iteration with the external
 * mt_for_each() interface, verifying each visited value on the way.
 */
1752 static noinline void __init bench_mt_for_each(struct maple_tree *mt)
1754 int i, count = 1000000;
1755 unsigned long max = 2500, index = 0;
/* 5-wide ranges back to back: [i, i+4] for every i multiple of 5. */
1758 for (i = 0; i < max; i += 5)
1759 mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);
1761 for (i = 0; i < count; i++) {
1762 unsigned long j = 0;
1764 mt_for_each(mt, entry, index, max) {
1765 MT_BUG_ON(mt, entry != xa_mk_value(j));
1775 #if defined(BENCH_MAS_FOR_EACH)
/*
 * bench_mas_for_each() - benchmark iteration with the advanced
 * mas_for_each() interface over a tree of variable-width ranges.
 */
1776 static noinline void __init bench_mas_for_each(struct maple_tree *mt)
1778 int i, count = 1000000;
1779 unsigned long max = 2500;
1781 MA_STATE(mas, mt, 0, 0);
1783 for (i = 0; i < max; i += 5) {
/* gap is set between these lines (elided); width varies per entry. */
1788 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1792 for (i = 0; i < count; i++) {
1793 unsigned long j = 0;
1795 mas_for_each(&mas, entry, max) {
1796 MT_BUG_ON(mt, entry != xa_mk_value(j));
1805 #if defined(BENCH_MAS_PREV)
/*
 * bench_mas_prev() - benchmark reverse iteration with mas_prev() from
 * ULONG_MAX down to 0, validating each visited value.
 */
1806 static noinline void __init bench_mas_prev(struct maple_tree *mt)
1808 int i, count = 1000000;
1809 unsigned long max = 2500;
1811 MA_STATE(mas, mt, 0, 0);
1813 for (i = 0; i < max; i += 5) {
/* gap is set between these lines (elided); width varies per entry. */
1818 mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1822 for (i = 0; i < count; i++) {
/* 2495 is the highest stored index (last multiple of 5 below 2500). */
1823 unsigned long j = 2495;
1825 mas_set(&mas, ULONG_MAX);
1826 while ((entry = mas_prev(&mas, 0)) != NULL) {
1827 MT_BUG_ON(mt, entry != xa_mk_value(j));
1835 /* check_forking - simulate the kernel forking sequence with the tree. */
/*
 * Builds a source tree, then duplicates every entry into a second tree
 * using mas_expected_entries() to preallocate nodes (as the fork path
 * does), and validates the copy.
 */
1836 static noinline void __init check_forking(struct maple_tree *mt)
1839 struct maple_tree newmt;
1840 int i, nr_entries = 134;
1842 MA_STATE(mas, mt, 0, 0);
1843 MA_STATE(newmas, mt, 0, 0);
1845 for (i = 0; i <= nr_entries; i++)
1846 mtree_store_range(mt, i*10, i*10 + 5,
1847 xa_mk_value(i), GFP_KERNEL);
1849 mt_set_non_kernel(99999);
1850 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
/* Redirect the destination state at the new tree. */
1851 newmas.tree = &newmt;
/* Preallocation failure aborts the copy (error path elided here). */
1857 if (mas_expected_entries(&newmas, nr_entries)) {
/* Copy each source range verbatim into the new tree. */
1862 mas_for_each(&mas, val, ULONG_MAX) {
1863 newmas.index = mas.index;
1864 newmas.last = mas.last;
1865 mas_store(&newmas, val);
1868 mas_destroy(&newmas);
1869 mas_unlock(&newmas);
1870 mt_validate(&newmt);
1871 mt_set_non_kernel(0);
1872 mtree_destroy(&newmt);
/*
 * check_iteration() - verify that storing into the tree mid-iteration
 * (overwriting the end, start, or end-with-advance of an entry) does not
 * derail mas_for_each()/mas_find().
 */
1875 static noinline void __init check_iteration(struct maple_tree *mt)
1877 int i, nr_entries = 125;
1879 MA_STATE(mas, mt, 0, 0);
/* Contiguous 10-wide ranges: [i*10, i*10 + 9]. */
1881 for (i = 0; i <= nr_entries; i++)
1882 mtree_store_range(mt, i * 10, i * 10 + 9,
1883 xa_mk_value(i), GFP_KERNEL);
1885 mt_set_non_kernel(99999);
1889 mas_for_each(&mas, val, 925) {
1890 MT_BUG_ON(mt, mas.index != i * 10);
1891 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1892 /* Overwrite end of entry 92 */
1896 mas_store(&mas, val);
1900 /* Ensure mas_find() gets the next value */
1901 val = mas_find(&mas, ULONG_MAX);
1902 MT_BUG_ON(mt, val != xa_mk_value(i));
1906 mas_for_each(&mas, val, 785) {
1907 MT_BUG_ON(mt, mas.index != i * 10);
1908 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1909 /* Overwrite start of entry 78 */
1913 mas_store(&mas, val);
/* Iteration must resume at the expected next value. */
1918 val = mas_find(&mas, ULONG_MAX);
1919 MT_BUG_ON(mt, val != xa_mk_value(i));
1923 mas_for_each(&mas, val, 765) {
1924 MT_BUG_ON(mt, mas.index != i * 10);
1925 MT_BUG_ON(mt, mas.last != i * 10 + 9);
1926 /* Overwrite end of entry 76 and advance to the end */
1930 mas_store(&mas, val);
1934 /* Make sure the next find returns the one after 765, 766-769 */
1935 val = mas_find(&mas, ULONG_MAX);
1936 MT_BUG_ON(mt, val != xa_mk_value(76));
1939 mt_set_non_kernel(0);
/*
 * check_mas_store_gfp() - duplicate a populated tree into a new one using
 * mas_store_gfp() (allocating per store, unlike the preallocated copy in
 * check_forking()), then validate the result.
 */
1942 static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
1945 struct maple_tree newmt;
1946 int i, nr_entries = 135;
1948 MA_STATE(mas, mt, 0, 0);
1949 MA_STATE(newmas, mt, 0, 0);
1951 for (i = 0; i <= nr_entries; i++)
1952 mtree_store_range(mt, i*10, i*10 + 5,
1953 xa_mk_value(i), GFP_KERNEL);
1955 mt_set_non_kernel(99999);
1956 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
/* Point the destination state at the new tree. */
1957 newmas.tree = &newmt;
1962 mas_for_each(&mas, val, ULONG_MAX) {
1963 newmas.index = mas.index;
1964 newmas.last = mas.last;
1965 mas_store_gfp(&newmas, val, GFP_KERNEL);
1967 mas_unlock(&newmas);
1969 mt_validate(&newmt);
1970 mt_set_non_kernel(0);
1971 mtree_destroy(&newmt);
1974 #if defined(BENCH_FORK)
/*
 * bench_forking() - benchmark the fork-style tree duplication performed
 * in check_forking(), repeated nr_fork times.
 */
1975 static noinline void __init bench_forking(struct maple_tree *mt)
1978 struct maple_tree newmt;
1979 int i, nr_entries = 134, nr_fork = 80000;
1981 MA_STATE(mas, mt, 0, 0);
1982 MA_STATE(newmas, mt, 0, 0);
1984 for (i = 0; i <= nr_entries; i++)
1985 mtree_store_range(mt, i*10, i*10 + 5,
1986 xa_mk_value(i), GFP_KERNEL);
1988 for (i = 0; i < nr_fork; i++) {
1989 mt_set_non_kernel(99999);
1990 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
1991 newmas.tree = &newmt;
/* Preallocate destination nodes; failure aborts this iteration. */
1998 if (mas_expected_entries(&newmas, nr_entries)) {
2002 mas_for_each(&mas, val, ULONG_MAX) {
2003 newmas.index = mas.index;
2004 newmas.last = mas.last;
2005 mas_store(&newmas, val);
2007 mas_destroy(&newmas);
2008 mas_unlock(&newmas);
2010 mt_validate(&newmt);
2011 mt_set_non_kernel(0);
2012 mtree_destroy(&newmt);
/*
 * next_prev_test() - exercise mas_next()/mas_prev()/mas_walk() across
 * slot, node, and level boundaries, including running off either end of
 * the tree and coming back.  level2 selects a 64-bit or 32-bit index set
 * for the two-level crossing checks.
 */
2017 static noinline void __init next_prev_test(struct maple_tree *mt)
2021 MA_STATE(mas, mt, 0, 0);
2022 struct maple_enode *mn;
2023 static const unsigned long *level2;
2024 static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
2026 static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
2028 unsigned long last_index;
/* Expected mas.index after running past the last entry (64-bit case). */
2033 last_index = 0x138e;
2040 for (i = 0; i <= nr_entries; i++)
2041 mtree_store_range(mt, i*10, i*10 + 5,
2042 xa_mk_value(i), GFP_KERNEL);
/* Step forward through roughly half the entries, stopping on none. */
2045 for (i = 0; i <= nr_entries / 2; i++) {
2046 mas_next(&mas, 1000);
2047 if (mas_is_none(&mas))
2054 mas_for_each(&mas, val, 1000) {
2061 mas_for_each(&mas, val, 1000) {
/*
2067 * 680 - 685 = 0x61a00001930c
2069 * 690 - 695 = 0x61a00001930c
2070 * Check simple next/prev
 */
2073 val = mas_walk(&mas);
2074 MT_BUG_ON(mt, val != NULL);
2076 val = mas_next(&mas, 1000);
2077 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2078 MT_BUG_ON(mt, mas.index != 690);
2079 MT_BUG_ON(mt, mas.last != 695);
2081 val = mas_prev(&mas, 0);
2082 MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
2083 MT_BUG_ON(mt, mas.index != 680);
2084 MT_BUG_ON(mt, mas.last != 685);
2086 val = mas_next(&mas, 1000);
2087 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2088 MT_BUG_ON(mt, mas.index != 690);
2089 MT_BUG_ON(mt, mas.last != 695);
2091 val = mas_next(&mas, 1000);
2092 MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
2093 MT_BUG_ON(mt, mas.index != 700);
2094 MT_BUG_ON(mt, mas.last != 705);
2096 /* Check across node boundaries of the tree */
2098 val = mas_walk(&mas);
2099 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2100 MT_BUG_ON(mt, mas.index != 70);
2101 MT_BUG_ON(mt, mas.last != 75);
2103 val = mas_next(&mas, 1000);
2104 MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
2105 MT_BUG_ON(mt, mas.index != 80);
2106 MT_BUG_ON(mt, mas.last != 85);
2108 val = mas_prev(&mas, 70);
2109 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2110 MT_BUG_ON(mt, mas.index != 70);
2111 MT_BUG_ON(mt, mas.last != 75);
2113 /* Check across two levels of the tree */
2115 mas_set(&mas, level2[0]);
2116 val = mas_walk(&mas);
2117 MT_BUG_ON(mt, val != NULL);
2118 val = mas_next(&mas, level2[1]);
2119 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2120 MT_BUG_ON(mt, mas.index != level2[2]);
2121 MT_BUG_ON(mt, mas.last != level2[3]);
2124 val = mas_next(&mas, level2[1]);
2125 MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
2126 MT_BUG_ON(mt, mas.index != level2[4]);
2127 MT_BUG_ON(mt, mas.last != level2[5]);
/* The step above must have moved to a different node. */
2128 MT_BUG_ON(mt, mn == mas.node);
2130 val = mas_prev(&mas, 0);
2131 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2132 MT_BUG_ON(mt, mas.index != level2[2]);
2133 MT_BUG_ON(mt, mas.last != level2[3]);
2135 /* Check running off the end and back on */
2136 mas_set(&mas, nr_entries * 10);
2137 val = mas_walk(&mas);
2138 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2139 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2140 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2142 val = mas_next(&mas, ULONG_MAX);
2143 MT_BUG_ON(mt, val != NULL);
2144 MT_BUG_ON(mt, mas.index != last_index);
2145 MT_BUG_ON(mt, mas.last != ULONG_MAX);
2147 val = mas_prev(&mas, 0);
2148 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2149 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2150 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2152 /* Check running off the start and back on */
2155 val = mas_walk(&mas);
2156 MT_BUG_ON(mt, val != xa_mk_value(1));
2157 MT_BUG_ON(mt, mas.index != 10);
2158 MT_BUG_ON(mt, mas.last != 15);
2160 val = mas_prev(&mas, 0);
2161 MT_BUG_ON(mt, val != xa_mk_value(0));
2162 MT_BUG_ON(mt, mas.index != 0);
2163 MT_BUG_ON(mt, mas.last != 5);
/* Stepping before entry 0 returns NULL and leaves mas in UNDERFLOW. */
2165 val = mas_prev(&mas, 0);
2166 MT_BUG_ON(mt, val != NULL);
2167 MT_BUG_ON(mt, mas.index != 0);
2168 MT_BUG_ON(mt, mas.last != 5);
2169 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
2173 mas_store(&mas, NULL);
2178 val = mas_prev(&mas, 0);
2179 MT_BUG_ON(mt, val != NULL);
2180 MT_BUG_ON(mt, mas.index != 0);
2181 MT_BUG_ON(mt, mas.last != 9);
/* Two singletons at 0 and 5: prev limited to 4 must find nothing. */
2187 mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
2188 mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
2191 val = mas_prev(&mas, 4);
2192 MT_BUG_ON(mt, val != NULL);
2198 /* Test spanning writes that require balancing right sibling or right cousin */
/*
 * Builds ~1000 entries and issues one large NULL store that spans many
 * nodes, forcing the spanning-store rebalance paths.
 */
2199 static noinline void __init check_spanning_relatives(struct maple_tree *mt)
2202 unsigned long i, nr_entries = 1000;
2204 for (i = 0; i <= nr_entries; i++)
2205 mtree_store_range(mt, i*10, i*10 + 5,
2206 xa_mk_value(i), GFP_KERNEL);
2209 mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
/*
 * check_fuzzer() - regression tests distilled from fuzzer-discovered
 * crashes.  Each numbered case below replays the exact operation
 * sequence that triggered a past bug; the case comment records the root
 * cause and the fix.  The sequences must not be reordered or simplified.
 */
2212 static noinline void __init check_fuzzer(struct maple_tree *mt)
/*
2215 * 1. Causes a spanning rebalance of a single root node.
2216 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
2217 * entire right side is consumed.
 */
2219 mtree_test_insert(mt, 88, (void *)0xb1);
2220 mtree_test_insert(mt, 84, (void *)0xa9);
2221 mtree_test_insert(mt, 2, (void *)0x5);
2222 mtree_test_insert(mt, 4, (void *)0x9);
2223 mtree_test_insert(mt, 14, (void *)0x1d);
2224 mtree_test_insert(mt, 7, (void *)0xf);
2225 mtree_test_insert(mt, 12, (void *)0x19);
2226 mtree_test_insert(mt, 18, (void *)0x25);
2227 mtree_test_store_range(mt, 8, 18, (void *)0x11);
/*
2232 * 2. Cause a spanning rebalance of two nodes in root.
2233 * Fixed by setting mast->r->max correctly.
 */
2235 mt_init_flags(mt, 0);
2236 mtree_test_store(mt, 87, (void *)0xaf);
2237 mtree_test_store(mt, 0, (void *)0x1);
2238 mtree_test_load(mt, 4);
2239 mtree_test_insert(mt, 4, (void *)0x9);
2240 mtree_test_store(mt, 8, (void *)0x11);
2241 mtree_test_store(mt, 44, (void *)0x59);
2242 mtree_test_store(mt, 68, (void *)0x89);
2243 mtree_test_store(mt, 2, (void *)0x5);
2244 mtree_test_insert(mt, 43, (void *)0x57);
2245 mtree_test_insert(mt, 24, (void *)0x31);
2246 mtree_test_insert(mt, 844, (void *)0x699);
2247 mtree_test_store(mt, 84, (void *)0xa9);
2248 mtree_test_store(mt, 4, (void *)0x9);
2249 mtree_test_erase(mt, 4);
2250 mtree_test_load(mt, 5);
2251 mtree_test_erase(mt, 0);
/*
2255 * 3. Cause a node overflow on copy
2256 * Fixed by using the correct check for node size in mas_wr_modify()
2257 * Also discovered issue with metadata setting.
 */
2259 mt_init_flags(mt, 0);
2260 mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
2261 mtree_test_store(mt, 4, (void *)0x9);
2262 mtree_test_erase(mt, 5);
2263 mtree_test_erase(mt, 0);
2264 mtree_test_erase(mt, 4);
2265 mtree_test_store(mt, 5, (void *)0xb);
2266 mtree_test_erase(mt, 5);
2267 mtree_test_store(mt, 5, (void *)0xb);
2268 mtree_test_erase(mt, 5);
2269 mtree_test_erase(mt, 4);
2270 mtree_test_store(mt, 4, (void *)0x9);
2271 mtree_test_store(mt, 444, (void *)0x379);
2272 mtree_test_store(mt, 0, (void *)0x1);
2273 mtree_test_load(mt, 0);
2274 mtree_test_store(mt, 5, (void *)0xb);
2275 mtree_test_erase(mt, 0);
/*
2279 * 4. spanning store failure due to writing incorrect pivot value at
2281 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes()
 */
2284 mt_init_flags(mt, 0);
2285 mtree_test_insert(mt, 261, (void *)0x20b);
2286 mtree_test_store(mt, 516, (void *)0x409);
2287 mtree_test_store(mt, 6, (void *)0xd);
2288 mtree_test_insert(mt, 5, (void *)0xb);
2289 mtree_test_insert(mt, 1256, (void *)0x9d1);
2290 mtree_test_store(mt, 4, (void *)0x9);
2291 mtree_test_erase(mt, 1);
2292 mtree_test_store(mt, 56, (void *)0x71);
2293 mtree_test_insert(mt, 1, (void *)0x3);
2294 mtree_test_store(mt, 24, (void *)0x31);
2295 mtree_test_erase(mt, 1);
2296 mtree_test_insert(mt, 2263, (void *)0x11af);
2297 mtree_test_insert(mt, 446, (void *)0x37d);
2298 mtree_test_store_range(mt, 6, 45, (void *)0xd);
2299 mtree_test_store_range(mt, 3, 446, (void *)0x7);
/*
2303 * 5. mas_wr_extend_null() may overflow slots.
2304 * Fix by checking against wr_mas->node_end.
 */
2306 mt_init_flags(mt, 0);
2307 mtree_test_store(mt, 48, (void *)0x61);
2308 mtree_test_store(mt, 3, (void *)0x7);
2309 mtree_test_load(mt, 0);
2310 mtree_test_store(mt, 88, (void *)0xb1);
2311 mtree_test_store(mt, 81, (void *)0xa3);
2312 mtree_test_insert(mt, 0, (void *)0x1);
2313 mtree_test_insert(mt, 8, (void *)0x11);
2314 mtree_test_insert(mt, 4, (void *)0x9);
2315 mtree_test_insert(mt, 2480, (void *)0x1361);
2316 mtree_test_insert(mt, ULONG_MAX,
2317 (void *)0xffffffffffffffff);
2318 mtree_test_erase(mt, ULONG_MAX);
/*
2322 * 6. When reusing a node with an implied pivot and the node is
2323 * shrinking, old data would be left in the implied slot
2324 * Fixed by checking the last pivot for the mas->max and clear
2325 * accordingly. This only affected the left-most node as that node is
2326 * the only one allowed to end in NULL.
 */
2328 mt_init_flags(mt, 0);
2329 mtree_test_erase(mt, 3);
2330 mtree_test_insert(mt, 22, (void *)0x2d);
2331 mtree_test_insert(mt, 15, (void *)0x1f);
2332 mtree_test_load(mt, 2);
2333 mtree_test_insert(mt, 1, (void *)0x3);
2334 mtree_test_insert(mt, 1, (void *)0x3);
2335 mtree_test_insert(mt, 5, (void *)0xb);
2336 mtree_test_erase(mt, 1);
2337 mtree_test_insert(mt, 1, (void *)0x3);
2338 mtree_test_insert(mt, 4, (void *)0x9);
2339 mtree_test_insert(mt, 1, (void *)0x3);
2340 mtree_test_erase(mt, 1);
2341 mtree_test_insert(mt, 2, (void *)0x5);
2342 mtree_test_insert(mt, 1, (void *)0x3);
2343 mtree_test_erase(mt, 3);
2344 mtree_test_insert(mt, 22, (void *)0x2d);
2345 mtree_test_insert(mt, 15, (void *)0x1f);
2346 mtree_test_insert(mt, 2, (void *)0x5);
2347 mtree_test_insert(mt, 1, (void *)0x3);
2348 mtree_test_insert(mt, 8, (void *)0x11);
2349 mtree_test_load(mt, 2);
2350 mtree_test_insert(mt, 1, (void *)0x3);
2351 mtree_test_insert(mt, 1, (void *)0x3);
2352 mtree_test_store(mt, 1, (void *)0x3);
2353 mtree_test_insert(mt, 5, (void *)0xb);
2354 mtree_test_erase(mt, 1);
2355 mtree_test_insert(mt, 1, (void *)0x3);
2356 mtree_test_insert(mt, 4, (void *)0x9);
2357 mtree_test_insert(mt, 1, (void *)0x3);
2358 mtree_test_erase(mt, 1);
2359 mtree_test_insert(mt, 2, (void *)0x5);
2360 mtree_test_insert(mt, 1, (void *)0x3);
2361 mtree_test_erase(mt, 3);
2362 mtree_test_insert(mt, 22, (void *)0x2d);
2363 mtree_test_insert(mt, 15, (void *)0x1f);
2364 mtree_test_insert(mt, 2, (void *)0x5);
2365 mtree_test_insert(mt, 1, (void *)0x3);
2366 mtree_test_insert(mt, 8, (void *)0x11);
2367 mtree_test_insert(mt, 12, (void *)0x19);
2368 mtree_test_erase(mt, 1);
2369 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2370 mtree_test_erase(mt, 62);
2371 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2372 mtree_test_insert(mt, 11, (void *)0x17);
2373 mtree_test_insert(mt, 3, (void *)0x7);
2374 mtree_test_insert(mt, 3, (void *)0x7);
2375 mtree_test_store(mt, 62, (void *)0x7d);
2376 mtree_test_erase(mt, 62);
2377 mtree_test_store_range(mt, 1, 15, (void *)0x3);
2378 mtree_test_erase(mt, 1);
2379 mtree_test_insert(mt, 22, (void *)0x2d);
2380 mtree_test_insert(mt, 12, (void *)0x19);
2381 mtree_test_erase(mt, 1);
2382 mtree_test_insert(mt, 3, (void *)0x7);
2383 mtree_test_store(mt, 62, (void *)0x7d);
2384 mtree_test_erase(mt, 62);
2385 mtree_test_insert(mt, 122, (void *)0xf5);
2386 mtree_test_store(mt, 3, (void *)0x7);
2387 mtree_test_insert(mt, 0, (void *)0x1);
2388 mtree_test_store_range(mt, 0, 1, (void *)0x1);
2389 mtree_test_insert(mt, 85, (void *)0xab);
2390 mtree_test_insert(mt, 72, (void *)0x91);
2391 mtree_test_insert(mt, 81, (void *)0xa3);
2392 mtree_test_insert(mt, 726, (void *)0x5ad);
2393 mtree_test_insert(mt, 0, (void *)0x1);
2394 mtree_test_insert(mt, 1, (void *)0x3);
2395 mtree_test_store(mt, 51, (void *)0x67);
2396 mtree_test_insert(mt, 611, (void *)0x4c7);
2397 mtree_test_insert(mt, 485, (void *)0x3cb);
2398 mtree_test_insert(mt, 1, (void *)0x3);
2399 mtree_test_erase(mt, 1);
2400 mtree_test_insert(mt, 0, (void *)0x1);
2401 mtree_test_insert(mt, 1, (void *)0x3);
2402 mtree_test_insert_range(mt, 26, 1, (void *)0x35);
2403 mtree_test_load(mt, 1);
2404 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2405 mtree_test_insert(mt, 1, (void *)0x3);
2406 mtree_test_erase(mt, 1);
2407 mtree_test_load(mt, 53);
2408 mtree_test_load(mt, 1);
2409 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2410 mtree_test_insert(mt, 222, (void *)0x1bd);
2411 mtree_test_insert(mt, 485, (void *)0x3cb);
2412 mtree_test_insert(mt, 1, (void *)0x3);
2413 mtree_test_erase(mt, 1);
2414 mtree_test_load(mt, 0);
2415 mtree_test_insert(mt, 21, (void *)0x2b);
2416 mtree_test_insert(mt, 3, (void *)0x7);
2417 mtree_test_store(mt, 621, (void *)0x4db);
2418 mtree_test_insert(mt, 0, (void *)0x1);
2419 mtree_test_erase(mt, 5);
2420 mtree_test_insert(mt, 1, (void *)0x3);
2421 mtree_test_store(mt, 62, (void *)0x7d);
2422 mtree_test_erase(mt, 62);
2423 mtree_test_store_range(mt, 1, 0, (void *)0x3);
2424 mtree_test_insert(mt, 22, (void *)0x2d);
2425 mtree_test_insert(mt, 12, (void *)0x19);
2426 mtree_test_erase(mt, 1);
2427 mtree_test_insert(mt, 1, (void *)0x3);
2428 mtree_test_store_range(mt, 4, 62, (void *)0x9);
2429 mtree_test_erase(mt, 62);
2430 mtree_test_erase(mt, 1);
2431 mtree_test_load(mt, 1);
2432 mtree_test_store_range(mt, 1, 22, (void *)0x3);
2433 mtree_test_insert(mt, 1, (void *)0x3);
2434 mtree_test_erase(mt, 1);
2435 mtree_test_load(mt, 53);
2436 mtree_test_load(mt, 1);
2437 mtree_test_store_range(mt, 1, 1, (void *)0x3);
2438 mtree_test_insert(mt, 222, (void *)0x1bd);
2439 mtree_test_insert(mt, 485, (void *)0x3cb);
2440 mtree_test_insert(mt, 1, (void *)0x3);
2441 mtree_test_erase(mt, 1);
2442 mtree_test_insert(mt, 1, (void *)0x3);
2443 mtree_test_load(mt, 0);
2444 mtree_test_load(mt, 0);
/*
2448 * 7. Previous fix was incomplete, fix mas_resuse_node() clearing of old
2449 * data by overwriting it first - that way metadata is of no concern.
 */
2451 mt_init_flags(mt, 0);
2452 mtree_test_load(mt, 1);
2453 mtree_test_insert(mt, 102, (void *)0xcd);
2454 mtree_test_erase(mt, 2);
2455 mtree_test_erase(mt, 0);
2456 mtree_test_load(mt, 0);
2457 mtree_test_insert(mt, 4, (void *)0x9);
2458 mtree_test_insert(mt, 2, (void *)0x5);
2459 mtree_test_insert(mt, 110, (void *)0xdd);
2460 mtree_test_insert(mt, 1, (void *)0x3);
2461 mtree_test_insert_range(mt, 5, 0, (void *)0xb);
2462 mtree_test_erase(mt, 2);
2463 mtree_test_store(mt, 0, (void *)0x1);
2464 mtree_test_store(mt, 112, (void *)0xe1);
2465 mtree_test_insert(mt, 21, (void *)0x2b);
2466 mtree_test_store(mt, 1, (void *)0x3);
2467 mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
2468 mtree_test_store(mt, 2, (void *)0x5);
2469 mtree_test_load(mt, 22);
2470 mtree_test_erase(mt, 2);
2471 mtree_test_store(mt, 210, (void *)0x1a5);
2472 mtree_test_store_range(mt, 0, 2, (void *)0x1);
2473 mtree_test_store(mt, 2, (void *)0x5);
2474 mtree_test_erase(mt, 2);
2475 mtree_test_erase(mt, 22);
2476 mtree_test_erase(mt, 1);
2477 mtree_test_erase(mt, 2);
2478 mtree_test_store(mt, 0, (void *)0x1);
2479 mtree_test_load(mt, 112);
2480 mtree_test_insert(mt, 2, (void *)0x5);
2481 mtree_test_erase(mt, 2);
2482 mtree_test_store(mt, 1, (void *)0x3);
2483 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2484 mtree_test_erase(mt, 0);
2485 mtree_test_erase(mt, 2);
2486 mtree_test_store(mt, 2, (void *)0x5);
2487 mtree_test_erase(mt, 0);
2488 mtree_test_erase(mt, 2);
2489 mtree_test_store(mt, 0, (void *)0x1);
2490 mtree_test_store(mt, 0, (void *)0x1);
2491 mtree_test_erase(mt, 2);
2492 mtree_test_store(mt, 2, (void *)0x5);
2493 mtree_test_erase(mt, 2);
2494 mtree_test_insert(mt, 2, (void *)0x5);
2495 mtree_test_insert_range(mt, 1, 2, (void *)0x3);
2496 mtree_test_erase(mt, 0);
2497 mtree_test_erase(mt, 2);
2498 mtree_test_store(mt, 0, (void *)0x1);
2499 mtree_test_load(mt, 112);
2500 mtree_test_store_range(mt, 110, 12, (void *)0xdd);
2501 mtree_test_store(mt, 2, (void *)0x5);
2502 mtree_test_load(mt, 110);
2503 mtree_test_insert_range(mt, 4, 71, (void *)0x9);
2504 mtree_test_load(mt, 2);
2505 mtree_test_store(mt, 2, (void *)0x5);
2506 mtree_test_insert_range(mt, 11, 22, (void *)0x17);
2507 mtree_test_erase(mt, 12);
2508 mtree_test_store(mt, 2, (void *)0x5);
2509 mtree_test_load(mt, 22);
/*
2514 * 8. When rebalancing or spanning_rebalance(), the max of the new node
2515 * may be set incorrectly to the final pivot and not the right max.
2516 * Fix by setting the left max to orig right max if the entire node is
 */
2519 mt_init_flags(mt, 0);
2520 mtree_test_store(mt, 6, (void *)0xd);
2521 mtree_test_store(mt, 67, (void *)0x87);
2522 mtree_test_insert(mt, 15, (void *)0x1f);
2523 mtree_test_insert(mt, 6716, (void *)0x3479);
2524 mtree_test_store(mt, 61, (void *)0x7b);
2525 mtree_test_insert(mt, 13, (void *)0x1b);
2526 mtree_test_store(mt, 8, (void *)0x11);
2527 mtree_test_insert(mt, 1, (void *)0x3);
2528 mtree_test_load(mt, 0);
2529 mtree_test_erase(mt, 67167);
2530 mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
2531 mtree_test_insert(mt, 6, (void *)0xd);
2532 mtree_test_erase(mt, 67);
2533 mtree_test_insert(mt, 1, (void *)0x3);
2534 mtree_test_erase(mt, 667167);
2535 mtree_test_insert(mt, 6, (void *)0xd);
2536 mtree_test_store(mt, 67, (void *)0x87);
2537 mtree_test_insert(mt, 5, (void *)0xb);
2538 mtree_test_erase(mt, 1);
2539 mtree_test_insert(mt, 6, (void *)0xd);
2540 mtree_test_erase(mt, 67);
2541 mtree_test_insert(mt, 15, (void *)0x1f);
2542 mtree_test_insert(mt, 67167, (void *)0x20cbf);
2543 mtree_test_insert(mt, 1, (void *)0x3);
2544 mtree_test_load(mt, 7);
2545 mtree_test_insert(mt, 16, (void *)0x21);
2546 mtree_test_insert(mt, 36, (void *)0x49);
2547 mtree_test_store(mt, 67, (void *)0x87);
2548 mtree_test_store(mt, 6, (void *)0xd);
2549 mtree_test_insert(mt, 367, (void *)0x2df);
2550 mtree_test_insert(mt, 115, (void *)0xe7);
2551 mtree_test_store(mt, 0, (void *)0x1);
2552 mtree_test_store_range(mt, 1, 3, (void *)0x3);
2553 mtree_test_store(mt, 1, (void *)0x3);
2554 mtree_test_erase(mt, 67167);
2555 mtree_test_insert_range(mt, 6, 47, (void *)0xd);
2556 mtree_test_store(mt, 1, (void *)0x3);
2557 mtree_test_insert_range(mt, 1, 67, (void *)0x3);
2558 mtree_test_load(mt, 67);
2559 mtree_test_insert(mt, 1, (void *)0x3);
2560 mtree_test_erase(mt, 67167);
/*
2564 * 9. spanning store to the end of data caused an invalid metadata
2565 * length which resulted in a crash eventually.
2566 * Fix by checking if there is a value in pivot before incrementing the
2567 * metadata end in mab_mas_cp(). To ensure this doesn't happen again,
2568 * abstract the two locations this happens into a function called
2569 * mas_leaf_set_meta().
 */
2571 mt_init_flags(mt, 0);
2572 mtree_test_insert(mt, 21, (void *)0x2b);
2573 mtree_test_insert(mt, 12, (void *)0x19);
2574 mtree_test_insert(mt, 6, (void *)0xd);
2575 mtree_test_insert(mt, 8, (void *)0x11);
2576 mtree_test_insert(mt, 2, (void *)0x5);
2577 mtree_test_insert(mt, 91, (void *)0xb7);
2578 mtree_test_insert(mt, 18, (void *)0x25);
2579 mtree_test_insert(mt, 81, (void *)0xa3);
2580 mtree_test_store_range(mt, 0, 128, (void *)0x1);
2581 mtree_test_store(mt, 1, (void *)0x3);
2582 mtree_test_erase(mt, 8);
2583 mtree_test_insert(mt, 11, (void *)0x17);
2584 mtree_test_insert(mt, 8, (void *)0x11);
2585 mtree_test_insert(mt, 21, (void *)0x2b);
2586 mtree_test_insert(mt, 2, (void *)0x5);
2587 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2588 mtree_test_erase(mt, ULONG_MAX - 10);
2589 mtree_test_store_range(mt, 0, 281, (void *)0x1);
2590 mtree_test_erase(mt, 2);
2591 mtree_test_insert(mt, 1211, (void *)0x977);
2592 mtree_test_insert(mt, 111, (void *)0xdf);
2593 mtree_test_insert(mt, 13, (void *)0x1b);
2594 mtree_test_insert(mt, 211, (void *)0x1a7);
2595 mtree_test_insert(mt, 11, (void *)0x17);
2596 mtree_test_insert(mt, 5, (void *)0xb);
2597 mtree_test_insert(mt, 1218, (void *)0x985);
2598 mtree_test_insert(mt, 61, (void *)0x7b);
2599 mtree_test_store(mt, 1, (void *)0x3);
2600 mtree_test_insert(mt, 121, (void *)0xf3);
2601 mtree_test_insert(mt, 8, (void *)0x11);
2602 mtree_test_insert(mt, 21, (void *)0x2b);
2603 mtree_test_insert(mt, 2, (void *)0x5);
2604 mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
2605 mtree_test_erase(mt, ULONG_MAX - 10);
2608 /* duplicate the tree with a specific gap */
/*
 * Stores nr_entries ranges of width (10 - gap), then duplicates them
 * into a fresh tree via mas_expected_entries() preallocation and
 * per-entry mas_store(), verifying the preallocation succeeds.
 */
2609 static noinline void __init check_dup_gaps(struct maple_tree *mt,
2610 unsigned long nr_entries, bool zero_start,
2613 unsigned long i = 0;
2614 struct maple_tree newmt;
2617 MA_STATE(mas, mt, 0, 0);
2618 MA_STATE(newmas, &newmt, 0, 0);
2623 mt_zero_nr_tallocated();
/* i starts at 0 or 1 depending on zero_start (set in elided lines). */
2624 for (; i <= nr_entries; i++)
2625 mtree_store_range(mt, i*10, (i+1)*10 - gap,
2626 xa_mk_value(i), GFP_KERNEL);
2628 mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
2629 mt_set_non_kernel(99999);
2631 ret = mas_expected_entries(&newmas, nr_entries);
2632 mt_set_non_kernel(0);
2633 MT_BUG_ON(mt, ret != 0);
2636 mas_for_each(&mas, tmp, ULONG_MAX) {
2637 newmas.index = mas.index;
2638 newmas.last = mas.last;
2639 mas_store(&newmas, tmp);
2642 mas_destroy(&newmas);
2643 mas_unlock(&newmas);
2645 mtree_destroy(&newmt);
2648 /* Duplicate many sizes of trees. Mainly to test expected entry values */
/*
 * Drives check_dup_gaps() across a matrix of tree sizes, gap widths,
 * start offsets, and tree flags (alloc-range vs plain) to cover the
 * mas_expected_entries() accounting at many node counts.
 */
2649 static noinline void __init check_dup(struct maple_tree *mt)
2652 int big_start = 100010;
2654 /* Check with a value at zero */
2655 for (i = 10; i < 1000; i++) {
2656 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2657 check_dup_gaps(mt, i, true, 5);
2664 /* Check with a value at zero, no gap */
2665 for (i = 1000; i < 2000; i++) {
2666 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2667 check_dup_gaps(mt, i, true, 0);
2674 /* Check with a value at zero and unreasonably large */
2675 for (i = big_start; i < big_start + 10; i++) {
2676 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2677 check_dup_gaps(mt, i, true, 5);
2684 /* Small to medium size not starting at zero*/
2685 for (i = 200; i < 1000; i++) {
2686 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2687 check_dup_gaps(mt, i, false, 5);
2694 /* Unreasonably large not starting at zero*/
2695 for (i = big_start; i < big_start + 10; i++) {
2696 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2697 check_dup_gaps(mt, i, false, 5);
2704 /* Check non-allocation tree not starting at zero */
2705 for (i = 1500; i < 3000; i++) {
2706 mt_init_flags(mt, 0);
2707 check_dup_gaps(mt, i, false, 5);
2716 /* Check non-allocation tree starting at zero */
2717 for (i = 200; i < 1000; i++) {
2718 mt_init_flags(mt, 0);
2719 check_dup_gaps(mt, i, true, 5);
2726 /* Unreasonably large */
2727 for (i = big_start + 5; i < big_start + 10; i++) {
2728 mt_init_flags(mt, 0);
2729 check_dup_gaps(mt, i, true, 5);
/*
 * check_bnode_min_spanning() - Fill a tree with small ranges, then
 * overwrite 240-509 with NULL so the store spans multiple nodes; as the
 * name suggests this targets big-node minimum handling during a spanning
 * store.  (Loop bounds for the fill are on lines elided here.)
 */
2737 static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
2740 MA_STATE(mas, mt, 0, 0);
2742 mt_set_non_kernel(9999);
/* Store a non-NULL marker (the function's own address) per range. */
2745 mas_set_range(&mas, i*10, i*10+9);
2746 mas_store(&mas, check_bnode_min_spanning);
/* Wipe 240-509: a single store crossing node boundaries. */
2749 mas_set_range(&mas, 240, 509);
2750 mas_store(&mas, NULL);
2753 mt_set_non_kernel(0);
/*
 * check_empty_area_window() - Validate mas_empty_area() and
 * mas_empty_area_rev() results and error codes against a tree with known
 * gaps: 0-9 is free, entries occupy 10-209, and 160-169 is punched out.
 */
2756 static noinline void __init check_empty_area_window(struct maple_tree *mt)
2758 unsigned long i, nr_entries = 20;
2759 MA_STATE(mas, mt, 0, 0);
/* Occupy ranges 10-19 .. 200-209, leaving 0-9 empty. */
2761 for (i = 1; i <= nr_entries; i++)
2762 mtree_store_range(mt, i*10, i*10 + 9,
2763 xa_mk_value(i), GFP_KERNEL);
2765 /* Create another hole besides the one at 0 */
2766 mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);
2768 /* Check lower bounds that don't fit */
2770 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);
2773 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);
2775 /* Check lower bound that does fit */
2777 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
2778 MT_BUG_ON(mt, mas.index != 5);
2779 MT_BUG_ON(mt, mas.last != 9);
2782 /* Check one gap that doesn't fit and one that does */
2785 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
2786 MT_BUG_ON(mt, mas.index != 161);
2787 MT_BUG_ON(mt, mas.last != 169);
2789 /* Check one gap that does fit above the min */
2791 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
2792 MT_BUG_ON(mt, mas.index != 216);
2793 MT_BUG_ON(mt, mas.last != 218);
2795 /* Check size that doesn't fit any gap */
2797 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);
2800 * Check size that doesn't fit the lower end of the window but
2804 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);
2807 * Check size that doesn't fit the upper end of the window but
2811 MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);
2813 /* Check mas_empty_area forward */
2815 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
2816 MT_BUG_ON(mt, mas.index != 0);
2817 MT_BUG_ON(mt, mas.last != 8);
2820 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
2821 MT_BUG_ON(mt, mas.index != 0);
2822 MT_BUG_ON(mt, mas.last != 3);
/* A 11-byte request cannot fit in the 0-9 hole within max 100. */
2825 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);
2828 MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);
/* Requested size larger than the whole window is invalid, not busy. */
2831 MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);
2834 mas_empty_area(&mas, 100, 165, 3);
2837 MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
/*
 * check_empty_area_fill() - Repeatedly find and fill empty areas of
 * power-of-two sizes (shift 12..16) until the tree is packed, then verify
 * -EBUSY, and exercise gap searches in a deep tree with two small holes.
 */
2841 static noinline void __init check_empty_area_fill(struct maple_tree *mt)
2843 const unsigned long max = 0x25D78000;
2846 MA_STATE(mas, mt, 0, 0);
2848 mt_set_non_kernel(99999);
2849 for (shift = 12; shift <= 16; shift++) {
2855 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
/* The returned window must be exactly the requested size. */
2856 MT_BUG_ON(mt, mas.last != mas.index + size - 1);
2857 mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
2863 /* No space left. */
2866 MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
2869 /* Fill a depth 3 node to the maximum */
2870 for (unsigned long i = 629440511; i <= 629440800; i += 6)
2871 mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
2872 /* Make space in the second-last depth 4 node */
2873 mtree_erase(mt, 631668735);
2874 /* Make space in the last depth 4 node */
2875 mtree_erase(mt, 629506047);
2877 /* Search from just after the gap in the second-last depth 4 */
2879 MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
2881 mt_set_non_kernel(0);
2885 * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
2887 * The table below shows the single entry tree (0-0 pointer) and normal tree
2890 * Function ENTRY Start Result index & last
2891 * ┬ ┬ ┬ ┬ ┬
2892 * │ │ │ │ └─ the final range
2893 * │ │ │ └─ The node value after execution
2894 * │ │ └─ The node value before execution
2895 * │ └─ If the entry exists or does not exist (DNE)
2896 * └─ The function name
2898 * Function ENTRY Start Result index & last
2901 * Single entry tree at 0-0
2902 * ------------------------
2903 * DNE MAS_START MAS_NONE 1 - oo
2904 * DNE MAS_PAUSE MAS_NONE 1 - oo
2905 * DNE MAS_ROOT MAS_NONE 1 - oo
2907 * DNE MAS_NONE MAS_ROOT 0
2909 * DNE MAS_NONE MAS_NONE 1 - oo
2913 * exists MAS_START active range
2914 * DNE MAS_START active set to last range
2915 * exists MAS_PAUSE active range
2916 * DNE MAS_PAUSE active set to last range
2917 * exists MAS_NONE active range
2918 * exists active active range
2919 * DNE active active set to last range
2920 * ERANGE active MAS_OVERFLOW last range
2922 * Function ENTRY Start Result index & last
2925 * Single entry tree at 0-0
2926 * ------------------------
2928 * exists MAS_START MAS_ROOT 0
2929 * exists MAS_PAUSE MAS_ROOT 0
2930 * exists MAS_NONE MAS_ROOT 0
2933 * DNE MAS_START MAS_NONE 0
2934 * DNE MAS_PAUSE MAS_NONE 0
2935 * DNE MAS_NONE MAS_NONE 0
2936 * DNE MAS_ROOT MAS_NONE 0
2940 * exists MAS_START active range
2941 * DNE MAS_START active set to min
2942 * exists MAS_PAUSE active range
2943 * DNE MAS_PAUSE active set to min
2944 * exists MAS_NONE active range
2945 * DNE MAS_NONE MAS_NONE set to min
2946 * any MAS_ROOT MAS_NONE 0
2947 * exists active active range
2948 * DNE active active last range
2949 * ERANGE active MAS_UNDERFLOW last range
2951 * Function ENTRY Start Result index & last
2953 * - at index or next
2954 * Single entry tree at 0-0
2955 * ------------------------
2957 * DNE MAS_START MAS_NONE 0
2958 * DNE MAS_PAUSE MAS_NONE 0
2959 * DNE MAS_ROOT MAS_NONE 0
2960 * DNE MAS_NONE MAS_NONE 1
2962 * exists MAS_START MAS_ROOT 0
2963 * exists MAS_PAUSE MAS_ROOT 0
2964 * exists MAS_NONE MAS_ROOT 0
2968 * exists MAS_START active range
2969 * DNE MAS_START active set to max
2970 * exists MAS_PAUSE active range
2971 * DNE MAS_PAUSE active set to max
2972 * exists MAS_NONE active range (start at last)
2973 * exists active active range
2974 * DNE active active last range (max < last)
2976 * Function ENTRY Start Result index & last
2978 * - at index or before
2979 * Single entry tree at 0-0
2980 * ------------------------
2982 * exists MAS_START MAS_ROOT 0
2983 * exists MAS_PAUSE MAS_ROOT 0
2984 * exists MAS_NONE MAS_ROOT 0
2986 * DNE MAS_START MAS_NONE 0
2987 * DNE MAS_PAUSE MAS_NONE 0
2988 * DNE MAS_NONE MAS_NONE 0
2989 * DNE MAS_ROOT MAS_NONE 0
2993 * exists MAS_START active range
2994 * DNE MAS_START active set to min
2995 * exists MAS_PAUSE active range
2996 * DNE MAS_PAUSE active set to min
2997 * exists MAS_NONE active range (start at index)
2998 * exists active active range
2999 * DNE active active last range (min > index)
3001 * Function ENTRY Start Result index & last
3004 * Single entry tree at 0-0
3005 * ------------------------
3007 * DNE MAS_START MAS_ROOT 1 - oo
3008 * DNE MAS_PAUSE MAS_ROOT 1 - oo
3009 * DNE MAS_NONE MAS_ROOT 1 - oo
3010 * DNE MAS_ROOT MAS_ROOT 1 - oo
3012 * exists MAS_START MAS_ROOT 0
3013 * exists MAS_PAUSE MAS_ROOT 0
3014 * exists MAS_NONE MAS_ROOT 0
3015 * exists MAS_ROOT MAS_ROOT 0
3019 * exists MAS_START active range
3020 * DNE MAS_START active range of NULL
3021 * exists MAS_PAUSE active range
3022 * DNE MAS_PAUSE active range of NULL
3023 * exists MAS_NONE active range
3024 * DNE MAS_NONE active range of NULL
3025 * exists active active range
3026 * DNE active active range of NULL
/*
 * mas_active() - true when the maple state holds a real node pointer,
 * i.e. it is none of the special sentinel states checked below.
 */
3029 #define mas_active(x) (((x).node != MAS_ROOT) && \
3030 ((x).node != MAS_START) && \
3031 ((x).node != MAS_PAUSE) && \
3032 ((x).node != MAS_NONE))
/*
 * check_state_handling() - Verify the maple-state transitions tabulated in
 * the comment block above: first against a single-entry (root) tree at
 * 0-0, then against a tree with real nodes (entries at 0x1000-0x1500,
 * 0x2000-0x2500, 0x3000-0x3500), for mas_prev/mas_next/mas_find/
 * mas_find_rev/mas_walk, checking entry, range and resulting state.
 */
3033 static noinline void __init check_state_handling(struct maple_tree *mt)
3035 MA_STATE(mas, mt, 0, 0);
3036 void *entry, *ptr = (void *) 0x1234500;
3040 /* Check MAS_ROOT First */
3041 mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
3044 /* prev: Start -> underflow*/
3045 entry = mas_prev(&mas, 0);
3046 MT_BUG_ON(mt, entry != NULL);
3047 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3049 /* prev: Start -> root */
3051 entry = mas_prev(&mas, 0);
3052 MT_BUG_ON(mt, entry != ptr);
3053 MT_BUG_ON(mt, mas.index != 0);
3054 MT_BUG_ON(mt, mas.last != 0);
3055 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3057 /* prev: pause -> root */
3060 entry = mas_prev(&mas, 0);
3061 MT_BUG_ON(mt, entry != ptr);
3062 MT_BUG_ON(mt, mas.index != 0);
3063 MT_BUG_ON(mt, mas.last != 0);
3064 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3066 /* next: start -> none */
3068 entry = mas_next(&mas, ULONG_MAX);
3069 MT_BUG_ON(mt, mas.index != 1);
3070 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3071 MT_BUG_ON(mt, entry != NULL);
3072 MT_BUG_ON(mt, mas.node != MAS_NONE);
3074 /* next: start -> none*/
3076 entry = mas_next(&mas, ULONG_MAX);
3077 MT_BUG_ON(mt, mas.index != 1);
3078 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3079 MT_BUG_ON(mt, entry != NULL);
3080 MT_BUG_ON(mt, mas.node != MAS_NONE);
3082 /* find: start -> root */
3084 entry = mas_find(&mas, ULONG_MAX);
3085 MT_BUG_ON(mt, entry != ptr);
3086 MT_BUG_ON(mt, mas.index != 0);
3087 MT_BUG_ON(mt, mas.last != 0);
3088 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3090 /* find: root -> none */
3091 entry = mas_find(&mas, ULONG_MAX);
3092 MT_BUG_ON(mt, entry != NULL);
3093 MT_BUG_ON(mt, mas.index != 1);
3094 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3095 MT_BUG_ON(mt, mas.node != MAS_NONE);
3097 /* find: none -> none */
3098 entry = mas_find(&mas, ULONG_MAX);
3099 MT_BUG_ON(mt, entry != NULL);
3100 MT_BUG_ON(mt, mas.index != 1);
3101 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3102 MT_BUG_ON(mt, mas.node != MAS_NONE);
3104 /* find: start -> none */
3106 entry = mas_find(&mas, ULONG_MAX);
3107 MT_BUG_ON(mt, entry != NULL);
3108 MT_BUG_ON(mt, mas.index != 1);
3109 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3110 MT_BUG_ON(mt, mas.node != MAS_NONE);
3112 /* find_rev: none -> root */
3113 entry = mas_find_rev(&mas, 0);
3114 MT_BUG_ON(mt, entry != ptr);
3115 MT_BUG_ON(mt, mas.index != 0);
3116 MT_BUG_ON(mt, mas.last != 0);
3117 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3119 /* find_rev: start -> root */
3121 entry = mas_find_rev(&mas, 0);
3122 MT_BUG_ON(mt, entry != ptr);
3123 MT_BUG_ON(mt, mas.index != 0);
3124 MT_BUG_ON(mt, mas.last != 0);
3125 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3127 /* find_rev: root -> none */
3128 entry = mas_find_rev(&mas, 0);
3129 MT_BUG_ON(mt, entry != NULL);
3130 MT_BUG_ON(mt, mas.index != 0);
3131 MT_BUG_ON(mt, mas.last != 0);
3132 MT_BUG_ON(mt, mas.node != MAS_NONE);
3134 /* find_rev: none -> none */
3135 entry = mas_find_rev(&mas, 0);
3136 MT_BUG_ON(mt, entry != NULL);
3137 MT_BUG_ON(mt, mas.index != 0);
3138 MT_BUG_ON(mt, mas.last != 0);
3139 MT_BUG_ON(mt, mas.node != MAS_NONE);
3141 /* find_rev: start -> root */
3143 entry = mas_find_rev(&mas, 0);
3144 MT_BUG_ON(mt, entry != ptr);
3145 MT_BUG_ON(mt, mas.index != 0);
3146 MT_BUG_ON(mt, mas.last != 0);
3147 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3149 /* walk: start -> none */
3151 entry = mas_walk(&mas);
3152 MT_BUG_ON(mt, entry != NULL);
3153 MT_BUG_ON(mt, mas.index != 1);
3154 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3155 MT_BUG_ON(mt, mas.node != MAS_NONE);
3157 /* walk: pause -> none*/
3160 entry = mas_walk(&mas);
3161 MT_BUG_ON(mt, entry != NULL);
3162 MT_BUG_ON(mt, mas.index != 1);
3163 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3164 MT_BUG_ON(mt, mas.node != MAS_NONE);
3166 /* walk: none -> none */
3167 mas.index = mas.last = 10;
3168 entry = mas_walk(&mas);
3169 MT_BUG_ON(mt, entry != NULL);
3170 MT_BUG_ON(mt, mas.index != 1);
3171 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3172 MT_BUG_ON(mt, mas.node != MAS_NONE);
3174 /* walk: none -> none */
3175 entry = mas_walk(&mas);
3176 MT_BUG_ON(mt, entry != NULL);
3177 MT_BUG_ON(mt, mas.index != 1);
3178 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3179 MT_BUG_ON(mt, mas.node != MAS_NONE);
3181 /* walk: start -> root */
3183 entry = mas_walk(&mas);
3184 MT_BUG_ON(mt, entry != ptr);
3185 MT_BUG_ON(mt, mas.index != 0);
3186 MT_BUG_ON(mt, mas.last != 0);
3187 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3189 /* walk: pause -> root */
3192 entry = mas_walk(&mas);
3193 MT_BUG_ON(mt, entry != ptr);
3194 MT_BUG_ON(mt, mas.index != 0);
3195 MT_BUG_ON(mt, mas.last != 0);
3196 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3198 /* walk: none -> root */
3199 mas.node = MAS_NONE;
3200 entry = mas_walk(&mas);
3201 MT_BUG_ON(mt, entry != ptr);
3202 MT_BUG_ON(mt, mas.index != 0);
3203 MT_BUG_ON(mt, mas.last != 0);
3204 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3206 /* walk: root -> root */
3207 entry = mas_walk(&mas);
3208 MT_BUG_ON(mt, entry != ptr);
3209 MT_BUG_ON(mt, mas.index != 0);
3210 MT_BUG_ON(mt, mas.last != 0);
3211 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3213 /* walk: root -> none */
3215 entry = mas_walk(&mas);
3216 MT_BUG_ON(mt, entry != NULL);
3217 MT_BUG_ON(mt, mas.index != 1);
3218 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3219 MT_BUG_ON(mt, mas.node != MAS_NONE);
3221 /* walk: none -> root */
3222 mas.index = mas.last = 0;
3223 entry = mas_walk(&mas);
3224 MT_BUG_ON(mt, entry != ptr);
3225 MT_BUG_ON(mt, mas.index != 0);
3226 MT_BUG_ON(mt, mas.last != 0);
3227 MT_BUG_ON(mt, mas.node != MAS_ROOT);
3231 /* Check when there is an actual node */
3232 mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3233 mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3234 mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3235 mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3239 /* next: start ->active */
3241 entry = mas_next(&mas, ULONG_MAX);
3242 MT_BUG_ON(mt, entry != ptr);
3243 MT_BUG_ON(mt, mas.index != 0x1000);
3244 MT_BUG_ON(mt, mas.last != 0x1500);
3245 MT_BUG_ON(mt, !mas_active(mas));
3247 /* next: pause ->active */
3250 entry = mas_next(&mas, ULONG_MAX);
3251 MT_BUG_ON(mt, entry != ptr);
3252 MT_BUG_ON(mt, mas.index != 0x1000);
3253 MT_BUG_ON(mt, mas.last != 0x1500);
3254 MT_BUG_ON(mt, !mas_active(mas));
3256 /* next: none ->active */
3257 mas.index = mas.last = 0;
3259 mas.node = MAS_NONE;
3260 entry = mas_next(&mas, ULONG_MAX);
3261 MT_BUG_ON(mt, entry != ptr);
3262 MT_BUG_ON(mt, mas.index != 0x1000);
3263 MT_BUG_ON(mt, mas.last != 0x1500);
3264 MT_BUG_ON(mt, !mas_active(mas));
3266 /* next:active ->active */
3267 entry = mas_next(&mas, ULONG_MAX);
3268 MT_BUG_ON(mt, entry != ptr2);
3269 MT_BUG_ON(mt, mas.index != 0x2000);
3270 MT_BUG_ON(mt, mas.last != 0x2500);
3271 MT_BUG_ON(mt, !mas_active(mas));
3273 /* next:active -> active beyond data */
3274 entry = mas_next(&mas, 0x2999);
3275 MT_BUG_ON(mt, entry != NULL);
3276 MT_BUG_ON(mt, mas.index != 0x2501);
3277 MT_BUG_ON(mt, mas.last != 0x2fff);
3278 MT_BUG_ON(mt, !mas_active(mas));
3280 /* Continue after last range ends after max */
3281 entry = mas_next(&mas, ULONG_MAX);
3282 MT_BUG_ON(mt, entry != ptr3);
3283 MT_BUG_ON(mt, mas.index != 0x3000);
3284 MT_BUG_ON(mt, mas.last != 0x3500);
3285 MT_BUG_ON(mt, !mas_active(mas));
3287 /* next:active -> active continued */
3288 entry = mas_next(&mas, ULONG_MAX);
3289 MT_BUG_ON(mt, entry != NULL);
3290 MT_BUG_ON(mt, mas.index != 0x3501);
3291 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3292 MT_BUG_ON(mt, !mas_active(mas));
3294 /* next:active -> overflow */
3295 entry = mas_next(&mas, ULONG_MAX);
3296 MT_BUG_ON(mt, entry != NULL);
3297 MT_BUG_ON(mt, mas.index != 0x3501);
3298 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3299 MT_BUG_ON(mt, mas.node != MAS_OVERFLOW);
3301 /* next:overflow -> overflow */
3302 entry = mas_next(&mas, ULONG_MAX);
3303 MT_BUG_ON(mt, entry != NULL);
3304 MT_BUG_ON(mt, mas.index != 0x3501);
3305 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3306 MT_BUG_ON(mt, mas.node != MAS_OVERFLOW);
3308 /* prev:overflow -> active */
3309 entry = mas_prev(&mas, 0);
3310 MT_BUG_ON(mt, entry != ptr3);
3311 MT_BUG_ON(mt, mas.index != 0x3000);
3312 MT_BUG_ON(mt, mas.last != 0x3500);
3313 MT_BUG_ON(mt, !mas_active(mas));
3315 /* next: none -> active, skip value at location */
3317 entry = mas_next(&mas, ULONG_MAX);
3318 mas.node = MAS_NONE;
3320 entry = mas_next(&mas, ULONG_MAX);
3321 MT_BUG_ON(mt, entry != ptr2);
3322 MT_BUG_ON(mt, mas.index != 0x2000);
3323 MT_BUG_ON(mt, mas.last != 0x2500);
3324 MT_BUG_ON(mt, !mas_active(mas));
3326 /* prev:active ->active */
3327 entry = mas_prev(&mas, 0);
3328 MT_BUG_ON(mt, entry != ptr);
3329 MT_BUG_ON(mt, mas.index != 0x1000);
3330 MT_BUG_ON(mt, mas.last != 0x1500);
3331 MT_BUG_ON(mt, !mas_active(mas));
3333 /* prev:active -> active spanning end range */
3334 entry = mas_prev(&mas, 0x0100);
3335 MT_BUG_ON(mt, entry != NULL);
3336 MT_BUG_ON(mt, mas.index != 0);
3337 MT_BUG_ON(mt, mas.last != 0x0FFF);
3338 MT_BUG_ON(mt, !mas_active(mas));
3340 /* prev:active -> underflow */
3341 entry = mas_prev(&mas, 0);
3342 MT_BUG_ON(mt, entry != NULL);
3343 MT_BUG_ON(mt, mas.index != 0);
3344 MT_BUG_ON(mt, mas.last != 0x0FFF);
3345 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3347 /* prev:underflow -> underflow */
3348 entry = mas_prev(&mas, 0);
3349 MT_BUG_ON(mt, entry != NULL);
3350 MT_BUG_ON(mt, mas.index != 0);
3351 MT_BUG_ON(mt, mas.last != 0x0FFF);
3352 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3354 /* next:underflow -> active */
3355 entry = mas_next(&mas, ULONG_MAX);
3356 MT_BUG_ON(mt, entry != ptr);
3357 MT_BUG_ON(mt, mas.index != 0x1000);
3358 MT_BUG_ON(mt, mas.last != 0x1500);
3359 MT_BUG_ON(mt, !mas_active(mas));
3361 /* prev:first value -> underflow */
3362 entry = mas_prev(&mas, 0x1000);
3363 MT_BUG_ON(mt, entry != NULL);
3364 MT_BUG_ON(mt, mas.index != 0x1000);
3365 MT_BUG_ON(mt, mas.last != 0x1500);
3366 MT_BUG_ON(mt, mas.node != MAS_UNDERFLOW);
3368 /* find:underflow -> first value */
3369 entry = mas_find(&mas, ULONG_MAX);
3370 MT_BUG_ON(mt, entry != ptr);
3371 MT_BUG_ON(mt, mas.index != 0x1000);
3372 MT_BUG_ON(mt, mas.last != 0x1500);
3373 MT_BUG_ON(mt, !mas_active(mas));
3375 /* prev: pause ->active */
3376 mas_set(&mas, 0x3600);
3377 entry = mas_prev(&mas, 0);
3378 MT_BUG_ON(mt, entry != ptr3);
3380 entry = mas_prev(&mas, 0);
3381 MT_BUG_ON(mt, entry != ptr2);
3382 MT_BUG_ON(mt, mas.index != 0x2000);
3383 MT_BUG_ON(mt, mas.last != 0x2500);
3384 MT_BUG_ON(mt, !mas_active(mas));
3386 /* prev:active -> active spanning min */
3387 entry = mas_prev(&mas, 0x1600);
3388 MT_BUG_ON(mt, entry != NULL);
3389 MT_BUG_ON(mt, mas.index != 0x1501);
3390 MT_BUG_ON(mt, mas.last != 0x1FFF);
3391 MT_BUG_ON(mt, !mas_active(mas));
3393 /* prev: active ->active, continue */
3394 entry = mas_prev(&mas, 0);
3395 MT_BUG_ON(mt, entry != ptr);
3396 MT_BUG_ON(mt, mas.index != 0x1000);
3397 MT_BUG_ON(mt, mas.last != 0x1500);
3398 MT_BUG_ON(mt, !mas_active(mas));
3400 /* find: start ->active */
3402 entry = mas_find(&mas, ULONG_MAX);
3403 MT_BUG_ON(mt, entry != ptr);
3404 MT_BUG_ON(mt, mas.index != 0x1000);
3405 MT_BUG_ON(mt, mas.last != 0x1500);
3406 MT_BUG_ON(mt, !mas_active(mas));
3408 /* find: pause ->active */
3411 entry = mas_find(&mas, ULONG_MAX);
3412 MT_BUG_ON(mt, entry != ptr);
3413 MT_BUG_ON(mt, mas.index != 0x1000);
3414 MT_BUG_ON(mt, mas.last != 0x1500);
3415 MT_BUG_ON(mt, !mas_active(mas));
3417 /* find: start ->active on value */;
3418 mas_set(&mas, 1200);
3419 entry = mas_find(&mas, ULONG_MAX);
3420 MT_BUG_ON(mt, entry != ptr);
3421 MT_BUG_ON(mt, mas.index != 0x1000);
3422 MT_BUG_ON(mt, mas.last != 0x1500);
3423 MT_BUG_ON(mt, !mas_active(mas));
3425 /* find:active ->active */
3426 entry = mas_find(&mas, ULONG_MAX);
3427 MT_BUG_ON(mt, entry != ptr2);
3428 MT_BUG_ON(mt, mas.index != 0x2000);
3429 MT_BUG_ON(mt, mas.last != 0x2500);
3430 MT_BUG_ON(mt, !mas_active(mas));
3433 /* find:active -> active (NULL)*/
3434 entry = mas_find(&mas, 0x2700);
3435 MT_BUG_ON(mt, entry != NULL);
3436 MT_BUG_ON(mt, mas.index != 0x2501);
3437 MT_BUG_ON(mt, mas.last != 0x2FFF);
3438 MT_BUG_ON(mt, !mas_active(mas));
3440 /* find: overflow ->active */
3441 entry = mas_find(&mas, 0x5000);
3442 MT_BUG_ON(mt, entry != ptr3);
3443 MT_BUG_ON(mt, mas.index != 0x3000);
3444 MT_BUG_ON(mt, mas.last != 0x3500);
3445 MT_BUG_ON(mt, !mas_active(mas));
3447 /* find:active -> active (NULL) end*/
3448 entry = mas_find(&mas, ULONG_MAX);
3449 MT_BUG_ON(mt, entry != NULL);
3450 MT_BUG_ON(mt, mas.index != 0x3501);
3451 MT_BUG_ON(mt, mas.last != ULONG_MAX);
3452 MT_BUG_ON(mt, !mas_active(mas));
3454 /* find_rev: active (END) ->active */
3455 entry = mas_find_rev(&mas, 0);
3456 MT_BUG_ON(mt, entry != ptr3);
3457 MT_BUG_ON(mt, mas.index != 0x3000);
3458 MT_BUG_ON(mt, mas.last != 0x3500);
3459 MT_BUG_ON(mt, !mas_active(mas));
3461 /* find_rev:active ->active */
3462 entry = mas_find_rev(&mas, 0);
3463 MT_BUG_ON(mt, entry != ptr2);
3464 MT_BUG_ON(mt, mas.index != 0x2000);
3465 MT_BUG_ON(mt, mas.last != 0x2500);
3466 MT_BUG_ON(mt, !mas_active(mas));
3468 /* find_rev: pause ->active */
3470 entry = mas_find_rev(&mas, 0);
3471 MT_BUG_ON(mt, entry != ptr);
3472 MT_BUG_ON(mt, mas.index != 0x1000);
3473 MT_BUG_ON(mt, mas.last != 0x1500);
3474 MT_BUG_ON(mt, !mas_active(mas));
3476 /* find_rev:active -> active */
3477 entry = mas_find_rev(&mas, 0);
3478 MT_BUG_ON(mt, entry != NULL);
3479 MT_BUG_ON(mt, mas.index != 0);
3480 MT_BUG_ON(mt, mas.last != 0x0FFF);
3481 MT_BUG_ON(mt, !mas_active(mas));
3483 /* find_rev: start ->active */
3484 mas_set(&mas, 0x1200);
3485 entry = mas_find_rev(&mas, 0);
3486 MT_BUG_ON(mt, entry != ptr);
3487 MT_BUG_ON(mt, mas.index != 0x1000);
3488 MT_BUG_ON(mt, mas.last != 0x1500);
3489 MT_BUG_ON(mt, !mas_active(mas));
3491 /* mas_walk start ->active */
3492 mas_set(&mas, 0x1200);
3493 entry = mas_walk(&mas);
3494 MT_BUG_ON(mt, entry != ptr);
3495 MT_BUG_ON(mt, mas.index != 0x1000);
3496 MT_BUG_ON(mt, mas.last != 0x1500);
3497 MT_BUG_ON(mt, !mas_active(mas));
3499 /* mas_walk start ->active */
3500 mas_set(&mas, 0x1600);
3501 entry = mas_walk(&mas);
3502 MT_BUG_ON(mt, entry != NULL);
3503 MT_BUG_ON(mt, mas.index != 0x1501);
3504 MT_BUG_ON(mt, mas.last != 0x1fff);
3505 MT_BUG_ON(mt, !mas_active(mas));
3507 /* mas_walk pause ->active */
3508 mas_set(&mas, 0x1200);
3510 entry = mas_walk(&mas);
3511 MT_BUG_ON(mt, entry != ptr);
3512 MT_BUG_ON(mt, mas.index != 0x1000);
3513 MT_BUG_ON(mt, mas.last != 0x1500);
3514 MT_BUG_ON(mt, !mas_active(mas));
3516 /* mas_walk pause -> active */
3517 mas_set(&mas, 0x1600);
3519 entry = mas_walk(&mas);
3520 MT_BUG_ON(mt, entry != NULL);
3521 MT_BUG_ON(mt, mas.index != 0x1501);
3522 MT_BUG_ON(mt, mas.last != 0x1fff);
3523 MT_BUG_ON(mt, !mas_active(mas));
3525 /* mas_walk none -> active */
3526 mas_set(&mas, 0x1200);
3527 mas.node = MAS_NONE;
3528 entry = mas_walk(&mas);
3529 MT_BUG_ON(mt, entry != ptr);
3530 MT_BUG_ON(mt, mas.index != 0x1000);
3531 MT_BUG_ON(mt, mas.last != 0x1500);
3532 MT_BUG_ON(mt, !mas_active(mas));
3534 /* mas_walk none -> active */
3535 mas_set(&mas, 0x1600);
3536 mas.node = MAS_NONE;
3537 entry = mas_walk(&mas);
3538 MT_BUG_ON(mt, entry != NULL);
3539 MT_BUG_ON(mt, mas.index != 0x1501);
3540 MT_BUG_ON(mt, mas.last != 0x1fff);
3541 MT_BUG_ON(mt, !mas_active(mas));
3543 /* mas_walk active -> active */
3547 entry = mas_walk(&mas);
3548 MT_BUG_ON(mt, entry != ptr);
3549 MT_BUG_ON(mt, mas.index != 0x1000);
3550 MT_BUG_ON(mt, mas.last != 0x1500);
3551 MT_BUG_ON(mt, !mas_active(mas));
3553 /* mas_walk active -> active */
3556 entry = mas_walk(&mas);
3557 MT_BUG_ON(mt, entry != NULL);
3558 MT_BUG_ON(mt, mas.index != 0x1501);
3559 MT_BUG_ON(mt, mas.last != 0x1fff);
3560 MT_BUG_ON(mt, !mas_active(mas));
3565 static DEFINE_MTREE(tree);
/*
 * maple_tree_seed() - Module init: run every maple tree API test (and any
 * compiled-in BENCH_* benchmark), then report pass/run counts.
 * Each test gets a freshly initialised tree which is destroyed afterwards.
 */
3566 static int __init maple_tree_seed(void)
3568 unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
3569 1001, 1002, 1003, 1005, 0,
3573 pr_info("\nTEST STARTING\n\n");
3575 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3576 check_root_expand(&tree);
3577 mtree_destroy(&tree);
/* Benchmarks are opt-in via the BENCH_* defines near the top of the file. */
3579 #if defined(BENCH_SLOT_STORE)
3581 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3582 bench_slot_store(&tree);
3583 mtree_destroy(&tree);
3586 #if defined(BENCH_NODE_STORE)
3588 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3589 bench_node_store(&tree);
3590 mtree_destroy(&tree);
3593 #if defined(BENCH_AWALK)
3595 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3597 mtree_destroy(&tree);
3600 #if defined(BENCH_WALK)
3602 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3604 mtree_destroy(&tree);
3607 #if defined(BENCH_FORK)
3609 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3610 bench_forking(&tree);
3611 mtree_destroy(&tree);
3614 #if defined(BENCH_MT_FOR_EACH)
3616 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3617 bench_mt_for_each(&tree);
3618 mtree_destroy(&tree);
3621 #if defined(BENCH_MAS_FOR_EACH)
3623 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3624 bench_mas_for_each(&tree);
3625 mtree_destroy(&tree);
3628 #if defined(BENCH_MAS_PREV)
3630 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3631 bench_mas_prev(&tree);
3632 mtree_destroy(&tree);
3636 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3637 check_iteration(&tree);
3638 mtree_destroy(&tree);
3640 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3641 check_forking(&tree);
3642 mtree_destroy(&tree);
3644 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3645 check_mas_store_gfp(&tree);
3646 mtree_destroy(&tree);
3648 /* Test ranges (store and insert) */
3649 mt_init_flags(&tree, 0);
3650 check_ranges(&tree);
3651 mtree_destroy(&tree);
3653 #if defined(CONFIG_64BIT)
3654 /* These tests have ranges outside of 4GB */
3655 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3656 check_alloc_range(&tree);
3657 mtree_destroy(&tree);
3659 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3660 check_alloc_rev_range(&tree);
3661 mtree_destroy(&tree);
/* Basic load/insert sanity checks using values from set[]. */
3664 mt_init_flags(&tree, 0);
3666 check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */
3668 check_insert(&tree, set[9], &tree); /* Insert 0 */
3669 check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
3670 check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */
3672 check_insert(&tree, set[10], ptr); /* Insert 5003 */
3673 check_load(&tree, set[9], &tree); /* See if 0 -> &tree */
3674 check_load(&tree, set[11], NULL); /* See if 5002 -> NULL */
3675 check_load(&tree, set[10], ptr); /* See if 5003 -> ptr */
3677 /* Clear out the tree */
3678 mtree_destroy(&tree);
3680 /* Try to insert, insert a dup, and load back what was inserted. */
3681 mt_init_flags(&tree, 0);
3682 check_insert(&tree, set[0], &tree); /* Insert 5015 */
3683 check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
3684 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
3687 * Second set of tests try to load a value that doesn't exist, inserts
3688 * a second value, then loads the value again
3690 check_load(&tree, set[1], NULL); /* See if 5014 -> NULL */
3691 check_insert(&tree, set[1], ptr); /* insert 5014 -> ptr */
3692 check_load(&tree, set[1], ptr); /* See if 5014 -> ptr */
3693 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
3695 * Tree currently contains:
3696 * p[0]: 14 -> (nil) p[1]: 15 -> ptr p[2]: 16 -> &tree p[3]: 0 -> (nil)
3698 check_insert(&tree, set[6], ptr); /* insert 1002 -> ptr */
3699 check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */
3701 check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
3702 check_load(&tree, set[1], ptr); /* See if 5014 -> ptr */
3703 check_load(&tree, set[6], ptr); /* See if 1002 -> ptr */
3704 check_load(&tree, set[7], &tree); /* 1003 = &tree ? */
3706 /* Clear out tree */
3707 mtree_destroy(&tree);
3709 mt_init_flags(&tree, 0);
3710 /* Test inserting into a NULL hole. */
3711 check_insert(&tree, set[5], ptr); /* insert 1001 -> ptr */
3712 check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */
3713 check_insert(&tree, set[6], ptr); /* insert 1002 -> ptr */
3714 check_load(&tree, set[5], ptr); /* See if 1001 -> ptr */
3715 check_load(&tree, set[6], ptr); /* See if 1002 -> ptr */
3716 check_load(&tree, set[7], &tree); /* See if 1003 -> &tree */
3718 /* Clear out the tree */
3719 mtree_destroy(&tree);
3721 mt_init_flags(&tree, 0);
3723 * set[] = {5015, 5014, 5017, 25, 1000,
3724 * 1001, 1002, 1003, 1005, 0,
3728 check_insert(&tree, set[0], ptr); /* 5015 */
3729 check_insert(&tree, set[1], &tree); /* 5014 */
3730 check_insert(&tree, set[2], ptr); /* 5017 */
3731 check_insert(&tree, set[3], &tree); /* 25 */
3732 check_load(&tree, set[0], ptr);
3733 check_load(&tree, set[1], &tree);
3734 check_load(&tree, set[2], ptr);
3735 check_load(&tree, set[3], &tree);
3736 check_insert(&tree, set[4], ptr); /* 1000 < Should split. */
3737 check_load(&tree, set[0], ptr);
3738 check_load(&tree, set[1], &tree);
3739 check_load(&tree, set[2], ptr);
3740 check_load(&tree, set[3], &tree); /*25 */
3741 check_load(&tree, set[4], ptr);
3742 check_insert(&tree, set[5], &tree); /* 1001 */
3743 check_load(&tree, set[0], ptr);
3744 check_load(&tree, set[1], &tree);
3745 check_load(&tree, set[2], ptr);
3746 check_load(&tree, set[3], &tree);
3747 check_load(&tree, set[4], ptr);
3748 check_load(&tree, set[5], &tree);
3749 check_insert(&tree, set[6], ptr);
3750 check_load(&tree, set[0], ptr);
3751 check_load(&tree, set[1], &tree);
3752 check_load(&tree, set[2], ptr);
3753 check_load(&tree, set[3], &tree);
3754 check_load(&tree, set[4], ptr);
3755 check_load(&tree, set[5], &tree);
3756 check_load(&tree, set[6], ptr);
3757 check_insert(&tree, set[7], &tree);
3758 check_load(&tree, set[0], ptr);
3759 check_insert(&tree, set[8], ptr);
3761 check_insert(&tree, set[9], &tree);
3763 check_load(&tree, set[0], ptr);
3764 check_load(&tree, set[1], &tree);
3765 check_load(&tree, set[2], ptr);
3766 check_load(&tree, set[3], &tree);
3767 check_load(&tree, set[4], ptr);
3768 check_load(&tree, set[5], &tree);
3769 check_load(&tree, set[6], ptr);
3770 check_load(&tree, set[9], &tree);
3771 mtree_destroy(&tree);
/* Sequential insert/erase tests, forward and reverse. */
3773 mt_init_flags(&tree, 0);
3774 check_seq(&tree, 16, false);
3775 mtree_destroy(&tree);
3777 mt_init_flags(&tree, 0);
3778 check_seq(&tree, 1000, true);
3779 mtree_destroy(&tree);
3781 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3782 check_rev_seq(&tree, 1000, true);
3783 mtree_destroy(&tree);
3785 check_lower_bound_split(&tree);
3786 check_upper_bound_split(&tree);
3787 check_mid_split(&tree);
3789 mt_init_flags(&tree, 0);
3790 check_next_entry(&tree);
3792 check_find_2(&tree);
3793 mtree_destroy(&tree);
3795 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3796 check_prev_entry(&tree);
3797 mtree_destroy(&tree);
3799 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3800 check_gap_combining(&tree);
3801 mtree_destroy(&tree);
3803 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3804 check_node_overwrite(&tree);
3805 mtree_destroy(&tree);
3807 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3808 next_prev_test(&tree);
3809 mtree_destroy(&tree);
3811 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3812 check_spanning_relatives(&tree);
3813 mtree_destroy(&tree);
3815 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3816 check_rev_find(&tree);
3817 mtree_destroy(&tree);
3819 mt_init_flags(&tree, 0);
3820 check_fuzzer(&tree);
3821 mtree_destroy(&tree);
3823 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3825 mtree_destroy(&tree);
3827 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3828 check_bnode_min_spanning(&tree);
3829 mtree_destroy(&tree);
3831 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3832 check_empty_area_window(&tree);
3833 mtree_destroy(&tree);
3835 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3836 check_empty_area_fill(&tree);
3837 mtree_destroy(&tree);
3839 mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
3840 check_state_handling(&tree);
3841 mtree_destroy(&tree);
/* Final report: counters are bumped by MT_BUG_ON on every check. */
3847 pr_info("maple_tree: %u of %u tests passed\n",
3848 atomic_read(&maple_tree_tests_passed),
3849 atomic_read(&maple_tree_tests_run));
3850 if (atomic_read(&maple_tree_tests_run) ==
3851 atomic_read(&maple_tree_tests_passed))
3857 static void __exit maple_tree_harvest(void)
3862 module_init(maple_tree_seed);
3863 module_exit(maple_tree_harvest);
3864 MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
3865 MODULE_LICENSE("GPL");