// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
 *
 * Any tests that only require the interface of the tree.
 */

#include <linux/maple_tree.h>
#include <linux/module.h>

#define MTREE_ALLOC_MAX 0x2000000000000UL
#ifndef CONFIG_DEBUG_MAPLE_TREE
#define CONFIG_DEBUG_MAPLE_TREE
#endif

#define CONFIG_MAPLE_SEARCH
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)

/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */

#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#define cond_resched()			do {} while (0)
static int __init mtree_insert_index(struct maple_tree *mt,
		unsigned long index, gfp_t gfp)
{
	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
}

static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
{
	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
}
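/*
 * Illustrative sketch (not part of the original tests): xa_mk_value()
 * tags an integer as an XArray value entry and xa_to_value() recovers it,
 * which is why the helpers above mask the index with LONG_MAX.
 *
 *	void *entry = xa_mk_value(42);
 *
 *	BUG_ON(!xa_is_value(entry));
 *	BUG_ON(xa_to_value(entry) != 42);
 */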
static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
		void *ptr)
{
	return mtree_insert(mt, index, ptr, GFP_KERNEL);
}

static int __init mtree_test_store_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr)
{
	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
}

static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
		void *ptr)
{
	return mtree_test_store_range(mt, start, start, ptr);
}

static int __init mtree_test_insert_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr)
{
	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
}

static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
{
	return mtree_load(mt, index);
}

static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{
	return mtree_erase(mt, index);
}

#if defined(CONFIG_64BIT)
static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{
	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}

static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{
	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end - 1,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}
#endif
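/*
 * Illustrative sketch (assumption, not from the original file): the
 * wrappers above exercise mtree_alloc_range(), which finds a gap of
 * @size between @min and @max, stores @entry there and returns the
 * chosen index through its second argument:
 *
 *	unsigned long where;
 *
 *	if (!mtree_alloc_range(mt, &where, xa_mk_value(0), 0x1000,
 *			       0, 0xffff, GFP_KERNEL))
 *		pr_info("allocated at %lu\n", where);
 */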
static noinline void __init check_load(struct maple_tree *mt,
		unsigned long index, void *ptr)
{
	void *ret = mtree_test_load(mt, index);

	if (ret != ptr)
		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
	MT_BUG_ON(mt, ret != ptr);
}

static noinline void __init check_store_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret;
	unsigned long i;

	ret = mtree_test_store_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);
	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret;
	unsigned long i;

	ret = mtree_test_insert_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);
	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void __init check_insert(struct maple_tree *mt,
		unsigned long index, void *ptr)
{
	int ret;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != 0);
}

static noinline void __init check_dup_insert(struct maple_tree *mt,
		unsigned long index, void *ptr)
{
	int ret;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != -EEXIST);
}
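/*
 * Sketch (not in the original file): mtree_insert() refuses to overwrite
 * an occupied index, while mtree_store() replaces silently:
 *
 *	mtree_insert(mt, 7, xa_mk_value(7), GFP_KERNEL);	// 0
 *	mtree_insert(mt, 7, xa_mk_value(8), GFP_KERNEL);	// -EEXIST
 *	mtree_store(mt, 7, xa_mk_value(8), GFP_KERNEL);		// 0, replaces
 */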
static noinline void __init check_index_load(struct maple_tree *mt,
		unsigned long index)
{
	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
}

static inline __init int not_empty(struct maple_node *node)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
		if (node->slot[i])
			return 1;

	return 0;
}

static noinline void __init check_rev_seq(struct maple_tree *mt,
		unsigned long max, bool verbose)
{
	unsigned long i = max, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	while (i) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = i; j <= max; j++)
			check_index_load(mt, j);

		check_load(mt, i - 1, NULL);
		mt_set_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		mt_clear_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		i--;
	}
	check_load(mt, max + 1, NULL);

	if (verbose)
		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
			__func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
}

static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	for (i = 0; i <= max; i++) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = 0; j <= i; j++)
			check_index_load(mt, j);

		if (i)
			MT_BUG_ON(mt, !mt_height(mt));
		check_load(mt, i + 1, NULL);
	}

	if (verbose)
		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
}
static noinline void __init check_lb_not_empty(struct maple_tree *mt)
	unsigned long huge = 4000UL * 1000 * 1000;

	check_insert(mt, i, (void *) i);
	for (j = huge; j >= i; j /= 2) {
		check_load(mt, j-1, NULL);
		check_load(mt, j, (void *) j);
		check_load(mt, j+1, NULL);

static noinline void __init check_lower_bound_split(struct maple_tree *mt)
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_lb_not_empty(mt);

static noinline void __init check_upper_bound_split(struct maple_tree *mt)
	MT_BUG_ON(mt, !mtree_empty(mt));

	huge = 4000UL * 1000 * 1000;

	check_insert(mt, i, (void *) i);
	for (j = i; j >= huge; j *= 2) {
		check_load(mt, j-1, NULL);
		check_load(mt, j, (void *) j);
		check_load(mt, j+1, NULL);

static noinline void __init check_mid_split(struct maple_tree *mt)
	unsigned long huge = 8000UL * 1000 * 1000;

	check_insert(mt, huge, (void *) huge);
	check_insert(mt, 0, xa_mk_value(0));
	check_lb_not_empty(mt);

static noinline void __init check_rev_find(struct maple_tree *mt)
	int i, nr_entries = 200;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				xa_mk_value(i), GFP_KERNEL);

	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
static noinline void __init check_find(struct maple_tree *mt)
	unsigned long val = 0;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;
#endif

	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
					XA_ZERO_ENTRY, GFP_KERNEL));
	}

	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		/* For zero check. */
	}

	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		/* For zero check. */
	}

	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		/* For zero check. */
	}

	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
	}

	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32-bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
	}

	MT_BUG_ON(mt, max > 25);

	mtree_erase_index(mt, ULONG_MAX);

	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);

	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32-bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
	}

	mas_set(&mas, 1048576);

	entry = mas_find(&mas, 1048576);

	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete end entry
	 * 3. find the last value by searching for ULONG_MAX and then using
	 *    prev
	 */

	/* First, get the expected result. */
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;

	/* Get the previous value from MAS_START */
	entry2 = mas_prev(&mas, 0);

	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);

	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);
static noinline void __init check_find_2(struct maple_tree *mt)
	MA_STATE(mas, mt, 0, 0);

	mas_for_each(&mas, entry, ULONG_MAX)

	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);

		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
		}

		MT_BUG_ON(mt, j != i + 1);
	}

	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);

		mas_for_each(&mas, entry, ULONG_MAX) {
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
		}

		MT_BUG_ON(mt, j != 256);
	}

	/*MT_BUG_ON(mt, !mtree_empty(mt)); */
#if defined(CONFIG_64BIT)
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
	/*
	 * cat /proc/self/maps | awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */
	static const unsigned long range[] = {
		/* Inclusive, Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	static const unsigned long holes[] = {
		/*
		 * Note: start of hole is INCLUSIVE,
		 *       end of hole is EXCLUSIVE
		 *       (opposite of the above table.)
		 * Start of hole, end of hole, size of hole (+1)
		 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values per test case
	 * (the loop below steps by five):
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned
	 * 5. error return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x7fff5878d << 12, /* First rev hole of size 0x1000 */
		0, /* Return value success. */

		0x565234AF1 << 12, /* Max */
		0x565234AEE << 12, /* max - 3. */
		0, /* Return value success. */

		562949953421311 << 12, /* First rev hole of size 0x1000 */
		0, /* Return value success. */

		0x7F36D510A << 12, /* Max */
		0x7F36D5106 << 12, /* First rev hole of size 0x4000 */
		0, /* Return value success. */

		18446744073709551615UL,
		562915594369134UL << 12,

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
				(range[i + 1] >> 12) - 1);
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
	}

	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
				min, holes[i+1]>>12, holes[i+2]>>12,
				(holes[i+1] >> 12) - (holes[i+2] >> 12));
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
				holes[i+1] >> 12,
				holes[i+2] >> 12));
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
				(holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
	}

	for (i = 0; i < req_range_count; i += 5) {
		pr_debug("\tReverse request between %lu-%lu size %lu, should get %lu\n",
				req_range[i] >> 12,
				(req_range[i + 1] >> 12) - 1,
				req_range[i+2] >> 12,
				req_range[i+3] >> 12);
		check_mtree_alloc_rrange(mt,
				req_range[i] >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4], /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
static noinline void __init check_alloc_range(struct maple_tree *mt)
	/*
	 * cat /proc/self/maps|awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */
	static const unsigned long range[] = {
		/* Inclusive, Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};
	static const unsigned long holes[] = {
		/* Start of hole, end of hole, size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values per test case
	 * (the loop below steps by five):
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned
	 * 5. error return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x565234afb000, /* First hole in our data of size 1000. */
		0, /* Return value success. */

		0x7fff58791000, /* Max */
		0x0, /* First hole in our data of size 2000. */
		0, /* Return value success. */

		34148797436 << 12, /* Min */
		0x7fff587AF000, /* Max */
		34148798629 << 12, /* Expected location */
		0, /* Return value success. */

		34148798623 << 12, /* Min */
		34148798683 << 12, /* Max */
		0, /* Expected location */
		-EBUSY, /* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12, /* Min */
		0x7fff587AF000, /* Max */
		34148798632 << 12, /* Expected location */
		0, /* Return value success. */

		/* Test walking off the end of root. */
		0, /* Expected location */
		-EBUSY, /* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		4503599618982063UL << 12, /* Size */
		34359052178 << 12, /* Expected location */
		-EBUSY, /* Return failure. */

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
				(range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
	}

	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
				holes[i+1] >> 12, holes[i+2] >> 12,
				holes[i], holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
				holes[i+1] >> 12,
				holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
	}

	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
				i/5, req_range[i] >> 12, req_range[i + 1] >> 12,
				req_range[i + 2] >> 12, req_range[i + 3] >> 12,
				req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i] >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4], /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
	}

#if DEBUG_ALLOC_RANGE
	mt_validate(mt);
#endif
#endif
static noinline void __init check_ranges(struct maple_tree *mt)
	static const unsigned long r[] = {
		17, 22, /* Overlaps previous range. */

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
	MT_BUG_ON(mt, !mt_height(mt));

	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	MT_BUG_ON(mt, mt_height(mt));

	check_seq(mt, 50, false);
	mt_set_non_kernel(4);
	check_store_range(mt, 5, 47, xa_mk_value(47), 0);
	MT_BUG_ON(mt, !mt_height(mt));

	/* Create tree of 1-100 */
	check_seq(mt, 100, false);

	mt_set_non_kernel(10);
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));

	/* Create tree of 1-200 */
	check_seq(mt, 200, false);

	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));

	check_seq(mt, 30, false);
	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
	MT_BUG_ON(mt, !mt_height(mt));

	/* Overwrite across multiple levels. */
	/* Create tree of 1-400 */
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);

	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
	mt_set_non_kernel(50);
	mtree_test_erase(mt, 140);
	mtree_test_erase(mt, 141);
	mtree_test_erase(mt, 142);
	mtree_test_erase(mt, 143);
	mtree_test_erase(mt, 130);
	mtree_test_erase(mt, 131);
	mtree_test_erase(mt, 132);
	mtree_test_erase(mt, 133);
	mtree_test_erase(mt, 134);
	mtree_test_erase(mt, 135);
	check_load(mt, r[12], xa_mk_value(r[12]));
	check_load(mt, r[13], xa_mk_value(r[12]));
	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
	check_load(mt, 135, NULL);
	check_load(mt, 140, NULL);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));

	/* Overwrite multiple levels at the end of the tree (slot 7) */
	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	check_store_range(mt, 347, 352, xa_mk_value(347), 0);

	check_load(mt, 346, xa_mk_value(346));
	for (i = 347; i <= 352; i++)
		check_load(mt, i, xa_mk_value(347));
	for (i = 353; i <= 361; i++)
		check_load(mt, i, xa_mk_value(353));
	check_load(mt, 362, xa_mk_value(362));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 363; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 364, NULL);
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));

	mt_set_non_kernel(5);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 364; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	mt_set_non_kernel(0);

	MT_BUG_ON(mt, !mt_height(mt));

	/*
	 * Interesting cases:
	 * 1. Overwrite the end of a node and end in the first entry of the
	 *    next node.
	 * 2. Split a single range (see the sketch below).
	 * 3. Overwrite the start of a range
	 * 4. Overwrite the end of a range
	 * 5. Overwrite the entire range
	 * 6. Overwrite a range that causes multiple parent nodes to be
	 *    combined
	 * 7. Overwrite a range that causes multiple parent nodes and part of
	 *    root to be combined
	 * 8. Overwrite the whole tree
	 * 9. Try to overwrite the zero entry of an alloc tree.
	 * 10. Write a range larger than a node's current pivot
	 */
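	/*
	 * Sketch of case 2 above (illustrative, not part of the original
	 * tests): storing into the middle of an existing range splits it
	 * into three ranges:
	 *
	 *	mtree_store_range(mt, 0, 100, xa_mk_value(0), GFP_KERNEL);
	 *	mtree_store_range(mt, 40, 60, xa_mk_value(40), GFP_KERNEL);
	 *	// now 0-39 -> 0, 40-60 -> 40, 61-100 -> 0
	 */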
	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
	check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
	check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
	check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
	check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);

	mt_set_non_kernel(0);

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
	check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
	check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
	check_store_range(mt, 2460, 2470, NULL, 0);
	check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
	check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));

	/* Test rebalance gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mt_set_non_kernel(50);
	for (i = 0; i <= 50; i++) {
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 161, 161, xa_mk_value(161), 0);
	check_store_range(mt, 162, 162, xa_mk_value(162), 0);
	check_store_range(mt, 163, 163, xa_mk_value(163), 0);
	check_store_range(mt, 240, 249, NULL, 0);
	mtree_erase(mt, 200);
	mtree_erase(mt, 210);
	mtree_erase(mt, 220);
	mtree_erase(mt, 230);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);

	MT_BUG_ON(mt, !mt_height(mt));

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
	check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
	check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
	check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
	check_store_range(mt, 4842, 4849, NULL, 0);

	MT_BUG_ON(mt, !mt_height(mt));

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1300; i++) {
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Cause a 3 child split all the way up the tree. */
	for (i = 5; i < 215; i += 10)
		check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
	for (i = 5; i < 65; i += 10)
		check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);

	MT_BUG_ON(mt, mt_height(mt) >= 4);
	for (i = 5; i < 45; i += 10)
		check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);

	MT_BUG_ON(mt, mt_height(mt) < 4);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1200; i++) {
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Fill parents and leaves before split. */
	for (i = 5; i < 455; i += 10)
		check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);

	for (i = 1; i < 16; i++)
		check_store_range(mt, 8185 + i, 8185 + i + 1,
				xa_mk_value(8185+i), 0);
	MT_BUG_ON(mt, mt_height(mt) >= 4);
	/* Triple split across multiple levels. */
	check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);

	MT_BUG_ON(mt, mt_height(mt) != 4);
static noinline void __init check_next_entry(struct maple_tree *mt)
	unsigned long limit = 30, i = 0;
	MA_STATE(mas, mt, i, i);

	MT_BUG_ON(mt, !mtree_empty(mt));

	check_seq(mt, limit, false);

	/* Check the first one and get ma_state in the correct state. */
	MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
	for ( ; i <= limit + 1; i++) {
		entry = mas_next(&mas, limit);
		if (i > limit)
			MT_BUG_ON(mt, entry != NULL);
		else
			MT_BUG_ON(mt, xa_mk_value(i) != entry);
	}

static noinline void __init check_prev_entry(struct maple_tree *mt)
	unsigned long index = 16;

	MA_STATE(mas, mt, index, index);

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, 30, false);

	value = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, value != xa_mk_value(index));
	value = mas_prev(&mas, 0);
	MT_BUG_ON(mt, value != xa_mk_value(index - 1));

	/* Check limits on prev */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);

	for (i = 0; i <= index; i++) {
		mas_set_range(&mas, i*10, i*10+5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(2));

	value = mas_prev(&mas, 19);
	MT_BUG_ON(mt, value != NULL);

	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(8));

	value = mas_prev(&mas, 76);
	MT_BUG_ON(mt, value != NULL);
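	/*
	 * Illustrative sketch (assumption, not from the original tests) of
	 * the limit semantics above: with ranges at i*10 to i*10+5, stepping
	 * back from the entry covering 20-25 may not cross the floor passed
	 * to mas_prev():
	 *
	 *	MA_STATE(mas, mt, 20, 20);
	 *
	 *	mas_walk(&mas);				// entry covering 20-25
	 *	BUG_ON(mas_prev(&mas, 19) != NULL);	// 10-15 lies below 19
	 */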
static noinline void __init check_root_expand(struct maple_tree *mt)
	MA_STATE(mas, mt, 0, 0);

	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	ptr = &check_prev_entry;

	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);

	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	mt_init_flags(mt, 0);

	ptr = &check_prev_entry;
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	mas_set_range(&mas, 0, 100);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);
	MT_BUG_ON(mt, mas.last != 0);

	mt_init_flags(mt, 0);

	ptr = (void *)((unsigned long) check_prev_entry | 1UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));

	mt_init_flags(mt, 0);

	ptr = (void *)((unsigned long) check_prev_entry | 2UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));
static noinline void __init check_gap_combining(struct maple_tree *mt)
	struct maple_enode *mn1, *mn2;

	unsigned long singletons = 100;
	static const unsigned long *seq100;
	static const unsigned long seq100_64[] = {

	static const unsigned long seq100_32[] = {

	static const unsigned long seq2000[] = {

	static const unsigned long seq400[] = {
		256, 260, 266, 270, 275, 280, 290, 398,

	unsigned long index;

	MA_STATE(mas, mt, 0, 0);

	mas_set(&mas, index);
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, singletons, false); /* create 100 singletons. */

	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq100[2]);
	check_load(mt, seq100[2], NULL);
	mtree_test_erase(mt, seq100[1]);
	check_load(mt, seq100[1], NULL);

	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	mas_next(&mas, ULONG_MAX);
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the test. */

	/*
	 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
	 * seq100[4]. Search for the gap.
	 */
	mt_set_non_kernel(1);

	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],

	MT_BUG_ON(mt, mas.index != index + 1);

	mtree_test_erase(mt, seq100[6]);
	check_load(mt, seq100[6], NULL);
	mtree_test_erase(mt, seq100[7]);
	check_load(mt, seq100[7], NULL);
	mtree_test_erase(mt, seq100[8]);

	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mas_next(&mas, ULONG_MAX); /* go to the next entry. */
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */

	/*
	 * At this point, there is a gap of 3 at seq100[6]. Find it by
	 * searching 20 - 50 for size 3.
	 */
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],

	MT_BUG_ON(mt, mas.index != seq100[6]);

	mt_set_non_kernel(1);
	mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], xa_mk_value(seq100[14]));
	mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], NULL);

	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],

	MT_BUG_ON(mt, mas.index != seq100[13]);

	/*
	 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
	 * dead node.
	 */
	mt_set_non_kernel(2);
	mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
	mtree_test_erase(mt, seq100[15]);

	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],

	MT_BUG_ON(mt, mas.index != seq100[18]);

	/* seq 2000 tests are for multi-level tree gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 2000, false);
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq2000[0]);
	mtree_test_erase(mt, seq2000[1]);

	mt_set_non_kernel(2);

	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],

	MT_BUG_ON(mt, mas.index != seq2000[1]);

	/* seq 400 tests rebalancing over two levels. */
	mt_set_non_kernel(99);
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
	mt_set_non_kernel(0);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	mtree_test_store_range(mt, seq400[2], seq400[9],
			xa_mk_value(seq400[2]));
	mtree_test_store_range(mt, seq400[3], seq400[9],
			xa_mk_value(seq400[3]));
	mtree_test_store_range(mt, seq400[4], seq400[9],
			xa_mk_value(seq400[4]));
	mtree_test_store_range(mt, seq400[5], seq400[9],
			xa_mk_value(seq400[5]));
	mtree_test_store_range(mt, seq400[0], seq400[9],
			xa_mk_value(seq400[0]));
	mtree_test_store_range(mt, seq400[6], seq400[9],
			xa_mk_value(seq400[6]));
	mtree_test_store_range(mt, seq400[7], seq400[9],
			xa_mk_value(seq400[7]));
	mtree_test_store_range(mt, seq400[8], seq400[9],
			xa_mk_value(seq400[8]));
	mtree_test_store_range(mt, seq400[10], seq400[11],
			xa_mk_value(seq400[10]));

	mt_set_non_kernel(0);
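	/*
	 * Illustrative sketch (assumption, not part of the original file):
	 * mas_empty_area_rev() searches downward from @max toward @min for a
	 * gap of @size and reports it through mas.index/mas.last on success:
	 *
	 *	MA_STATE(mas, mt, 0, 0);
	 *
	 *	rcu_read_lock();
	 *	if (!mas_empty_area_rev(&mas, 20, 50, 3))
	 *		pr_info("gap of 3 at %lu-%lu\n", mas.index, mas.last);
	 *	rcu_read_unlock();
	 */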
static noinline void __init check_node_overwrite(struct maple_tree *mt)
	for (i = 0; i < max; i++)
		mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));

	mtree_test_store_range(mt, 319951, 367950, NULL);
#if defined(BENCH_SLOT_STORE)
static noinline void __init bench_slot_store(struct maple_tree *mt)
	int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
		mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
				GFP_KERNEL);
	}
#endif

#if defined(BENCH_NODE_STORE)
static noinline void __init bench_node_store(struct maple_tree *mt)
	int i, overwrite = 76, max = 240, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, overwrite, overwrite + 15,
				xa_mk_value(overwrite), GFP_KERNEL);

		overwrite += 5;
		if (overwrite >= 135)
			overwrite = 76;
	}
#endif

#if defined(BENCH_AWALK)
static noinline void __init bench_awalk(struct maple_tree *mt)
	int i, max = 2500, count = 50000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_empty_area_rev(&mas, 0, 2000, 10);
		mas_reset(&mas);
	}
#endif

#if defined(BENCH_WALK)
static noinline void __init bench_walk(struct maple_tree *mt)
	int i, max = 2500, count = 550000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_walk(&mas);
		mas_reset(&mas);
	}
#endif

#if defined(BENCH_MT_FOR_EACH)
static noinline void __init bench_mt_for_each(struct maple_tree *mt)
	int i, count = 1000000;
	unsigned long max = 2500, index = 0;
	void *entry;

	for (i = 0; i < max; i += 5)
		mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		unsigned long j = 0;

		mt_for_each(mt, entry, index, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
		}
	}
#endif
/* check_forking - simulate the kernel forking sequence with the tree. */
static noinline void __init check_forking(struct maple_tree *mt)
	struct maple_tree newmt;
	int i, nr_entries = 134;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);
	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	newmas.tree = &newmt;

	if (mas_expected_entries(&newmas, nr_entries)) {
		pr_err("OOM!");
		BUG_ON(1);
	}

	mas_for_each(&mas, val, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store(&newmas, val);
	}

	mas_destroy(&newmas);
	mas_unlock(&newmas);
	mt_validate(&newmt);
	mt_set_non_kernel(0);
	mtree_destroy(&newmt);
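	/*
	 * Sketch of the duplication pattern above (illustrative, not part of
	 * the original file): mas_expected_entries() preallocates nodes for
	 * a known number of stores, so the mas_store() loop cannot fail
	 * partway through, and mas_destroy() releases whatever was not used:
	 *
	 *	mas_lock(&newmas);
	 *	if (!mas_expected_entries(&newmas, nr_entries)) {
	 *		mas_for_each(&mas, val, ULONG_MAX) {
	 *			newmas.index = mas.index;
	 *			newmas.last = mas.last;
	 *			mas_store(&newmas, val);
	 *		}
	 *	}
	 *	mas_destroy(&newmas);
	 *	mas_unlock(&newmas);
	 */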
static noinline void __init check_iteration(struct maple_tree *mt)
	int i, nr_entries = 125;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i * 10, i * 10 + 9,
				xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);

	mas_for_each(&mas, val, 925) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 92 */
		mas_store(&mas, val);
	}

	/* Ensure mas_find() gets the next value */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_for_each(&mas, val, 785) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite start of entry 78 */
		mas_store(&mas, val);
	}

	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_for_each(&mas, val, 765) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 76 and advance to the end */
		mas_store(&mas, val);
		mas_next(&mas, ULONG_MAX);
	}

	/* Make sure the next find returns the one after 765, 766-769 */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(76));

	mt_set_non_kernel(0);
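	/*
	 * Sketch of the pattern exercised above (illustrative; the predicate
	 * is hypothetical): an entry can be replaced in place while
	 * iterating by storing through the same ma_state, and iteration then
	 * continues from the stored range:
	 *
	 *	mas_for_each(&mas, entry, max) {
	 *		if (should_replace(entry))	// hypothetical predicate
	 *			mas_store(&mas, new_entry);
	 *	}
	 */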
static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
	struct maple_tree newmt;
	int i, nr_entries = 135;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);
	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	newmas.tree = &newmt;

	mas_for_each(&mas, val, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store_gfp(&newmas, val, GFP_KERNEL);
	}
	mas_unlock(&newmas);

	mt_validate(&newmt);
	mt_set_non_kernel(0);
	mtree_destroy(&newmt);
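	/*
	 * Sketch (illustrative, not from the original file): unlike the
	 * preallocating mas_expected_entries()/mas_store() pattern in
	 * check_forking(), mas_store_gfp() may allocate on every store and
	 * can therefore fail; a minimal single store looks like:
	 *
	 *	MA_STATE(mas, mt, 5, 10);
	 *
	 *	mas_lock(&mas);
	 *	if (mas_store_gfp(&mas, xa_mk_value(5), GFP_KERNEL))
	 *		pr_err("store failed\n");
	 *	mas_unlock(&mas);
	 */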
#if defined(BENCH_FORK)
static noinline void __init bench_forking(struct maple_tree *mt)
	struct maple_tree newmt;
	int i, nr_entries = 134, nr_fork = 80000;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < nr_fork; i++) {
		mt_set_non_kernel(99999);
		mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
		newmas.tree = &newmt;

		if (mas_expected_entries(&newmas, nr_entries)) {
			pr_err("OOM!");
			BUG_ON(1);
		}

		mas_for_each(&mas, val, ULONG_MAX) {
			newmas.index = mas.index;
			newmas.last = mas.last;
			mas_store(&newmas, val);
		}
		mas_destroy(&newmas);
		mas_unlock(&newmas);

		mt_validate(&newmt);
		mt_set_non_kernel(0);
		mtree_destroy(&newmt);
	}
#endif
static noinline void __init next_prev_test(struct maple_tree *mt)
	MA_STATE(mas, mt, 0, 0);
	struct maple_enode *mn;
	static const unsigned long *level2;
	static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
		725};
	static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
		1760, 1765};
	unsigned long last_index;

	last_index = 0x138e;

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i <= nr_entries / 2; i++) {
		mas_next(&mas, 1000);
		if (mas_is_none(&mas))
			break;
	}

	mas_for_each(&mas, val, 1000) {

	mas_for_each(&mas, val, 1000) {

	/*
	 * 680 - 685 = 0x61a00001930c
	 * 690 - 695 = 0x61a00001930c
	 * Check simple next/prev
	 */
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != NULL);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
	MT_BUG_ON(mt, mas.index != 690);
	MT_BUG_ON(mt, mas.last != 695);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
	MT_BUG_ON(mt, mas.index != 680);
	MT_BUG_ON(mt, mas.last != 685);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
	MT_BUG_ON(mt, mas.index != 690);
	MT_BUG_ON(mt, mas.last != 695);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
	MT_BUG_ON(mt, mas.index != 700);
	MT_BUG_ON(mt, mas.last != 705);

	/* Check across node boundaries of the tree */
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
	MT_BUG_ON(mt, mas.index != 70);
	MT_BUG_ON(mt, mas.last != 75);

	val = mas_next(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
	MT_BUG_ON(mt, mas.index != 80);
	MT_BUG_ON(mt, mas.last != 85);

	val = mas_prev(&mas, 70);
	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
	MT_BUG_ON(mt, mas.index != 70);
	MT_BUG_ON(mt, mas.last != 75);

	/* Check across two levels of the tree */
	mas_set(&mas, level2[0]);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != NULL);
	val = mas_next(&mas, level2[1]);
	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
	MT_BUG_ON(mt, mas.index != level2[2]);
	MT_BUG_ON(mt, mas.last != level2[3]);
	mn = mas.node;

	val = mas_next(&mas, level2[1]);
	MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
	MT_BUG_ON(mt, mas.index != level2[4]);
	MT_BUG_ON(mt, mas.last != level2[5]);
	MT_BUG_ON(mt, mn == mas.node);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
	MT_BUG_ON(mt, mas.index != level2[2]);
	MT_BUG_ON(mt, mas.last != level2[3]);

	/* Check running off the end and back on */
	mas_set(&mas, nr_entries * 10);
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));

	val = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != NULL);
	MT_BUG_ON(mt, mas.index != last_index);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));

	/* Check running off the start and back on */
	val = mas_walk(&mas);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	MT_BUG_ON(mt, mas.index != 10);
	MT_BUG_ON(mt, mas.last != 15);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 5);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 0);

	mas_store(&mas, NULL);

	val = mas_prev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 0);

	mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
	mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);

	val = mas_prev(&mas, 4);
	MT_BUG_ON(mt, val != NULL);
/* Test spanning writes that require balancing right sibling or right cousin */
static noinline void __init check_spanning_relatives(struct maple_tree *mt)
	unsigned long i, nr_entries = 1000;

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				xa_mk_value(i), GFP_KERNEL);

	mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
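	/*
	 * Illustrative sketch (assumption, not from the original file): a
	 * store whose range spans several leaf nodes, like the NULL store
	 * above, takes the spanning-store path and rebalances with its right
	 * relatives:
	 *
	 *	mtree_store_range(mt, 50, 5000, NULL, GFP_KERNEL);
	 */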
static noinline void __init check_fuzzer(struct maple_tree *mt)
	/*
	 * 1. Causes a spanning rebalance of a single root node.
	 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
	 * entire right side is consumed.
	 */
	mtree_test_insert(mt, 88, (void *)0xb1);
	mtree_test_insert(mt, 84, (void *)0xa9);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 14, (void *)0x1d);
	mtree_test_insert(mt, 7, (void *)0xf);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_insert(mt, 18, (void *)0x25);
	mtree_test_store_range(mt, 8, 18, (void *)0x11);

	/*
	 * 2. Cause a spanning rebalance of two nodes in root.
	 * Fixed by setting mast->r->max correctly.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 87, (void *)0xaf);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 4);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_store(mt, 8, (void *)0x11);
	mtree_test_store(mt, 44, (void *)0x59);
	mtree_test_store(mt, 68, (void *)0x89);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 43, (void *)0x57);
	mtree_test_insert(mt, 24, (void *)0x31);
	mtree_test_insert(mt, 844, (void *)0x699);
	mtree_test_store(mt, 84, (void *)0xa9);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 4);
	mtree_test_load(mt, 5);
	mtree_test_erase(mt, 0);

	/*
	 * 3. Cause a node overflow on copy.
	 * Fixed by using the correct check for node size in mas_wr_modify().
	 * Also discovered an issue with metadata setting.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 5);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 4);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 5);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 5);
	mtree_test_erase(mt, 4);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_store(mt, 444, (void *)0x379);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 0);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 0);
	/*
	 * 4. Spanning store failure due to writing an incorrect pivot value
	 *    at the last slot.
	 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes().
	 */
	mt_init_flags(mt, 0);
	mtree_test_insert(mt, 261, (void *)0x20b);
	mtree_test_store(mt, 516, (void *)0x409);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_insert(mt, 1256, (void *)0x9d1);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 1);
	mtree_test_store(mt, 56, (void *)0x71);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 24, (void *)0x31);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2263, (void *)0x11af);
	mtree_test_insert(mt, 446, (void *)0x37d);
	mtree_test_store_range(mt, 6, 45, (void *)0xd);
	mtree_test_store_range(mt, 3, 446, (void *)0x7);

	/*
	 * 5. mas_wr_extend_null() may overflow slots.
	 * Fixed by checking against wr_mas->node_end.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 48, (void *)0x61);
	mtree_test_store(mt, 3, (void *)0x7);
	mtree_test_load(mt, 0);
	mtree_test_store(mt, 88, (void *)0xb1);
	mtree_test_store(mt, 81, (void *)0xa3);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 2480, (void *)0x1361);
	mtree_test_insert(mt, ULONG_MAX,
			(void *)0xffffffffffffffff);
	mtree_test_erase(mt, ULONG_MAX);
	/*
	 * 6. When reusing a node with an implied pivot and the node is
	 *    shrinking, old data would be left in the implied slot.
	 * Fixed by checking the last pivot for the mas->max and clearing
	 * accordingly.  This only affected the left-most node as that node
	 * is the only one allowed to end in NULL.
	 */
	mt_init_flags(mt, 0);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_load(mt, 2);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_load(mt, 2);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_store_range(mt, 4, 62, (void *)0x9);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 0, (void *)0x3);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 15, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_insert(mt, 122, (void *)0xf5);
	mtree_test_store(mt, 3, (void *)0x7);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_store_range(mt, 0, 1, (void *)0x1);
	mtree_test_insert(mt, 85, (void *)0xab);
	mtree_test_insert(mt, 72, (void *)0x91);
	mtree_test_insert(mt, 81, (void *)0xa3);
	mtree_test_insert(mt, 726, (void *)0x5ad);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 51, (void *)0x67);
	mtree_test_insert(mt, 611, (void *)0x4c7);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 26, 1, (void *)0x35);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 22, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 53);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 1, (void *)0x3);
	mtree_test_insert(mt, 222, (void *)0x1bd);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 0);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 621, (void *)0x4db);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_erase(mt, 5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 0, (void *)0x3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store_range(mt, 4, 62, (void *)0x9);
	mtree_test_erase(mt, 62);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 22, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 53);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 1, (void *)0x3);
	mtree_test_insert(mt, 222, (void *)0x1bd);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 0);
	mtree_test_load(mt, 0);
	/*
	 * 7. Previous fix was incomplete; fix mas_reuse_node() clearing of
	 * old data by overwriting it first - that way metadata is of no
	 * concern.
	 */
	mt_init_flags(mt, 0);
	mtree_test_load(mt, 1);
	mtree_test_insert(mt, 102, (void *)0xcd);
	mtree_test_erase(mt, 2);
	mtree_test_erase(mt, 0);
	mtree_test_load(mt, 0);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 110, (void *)0xdd);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 5, 0, (void *)0xb);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store(mt, 112, (void *)0xe1);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 22);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 210, (void *)0x1a5);
	mtree_test_store_range(mt, 0, 2, (void *)0x1);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_erase(mt, 22);
	mtree_test_erase(mt, 1);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 112);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 112);
	mtree_test_store_range(mt, 110, 12, (void *)0xdd);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 110);
	mtree_test_insert_range(mt, 4, 71, (void *)0x9);
	mtree_test_load(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_insert_range(mt, 11, 22, (void *)0x17);
	mtree_test_erase(mt, 12);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 22);
	/*
	 * 8. When rebalancing or spanning_rebalance(), the max of the new
	 * node may be set incorrectly to the final pivot and not the right
	 * max.  Fixed by setting the left max to the original right max if
	 * the entire node is consumed.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 6716, (void *)0x3479);
	mtree_test_store(mt, 61, (void *)0x7b);
	mtree_test_insert(mt, 13, (void *)0x1b);
	mtree_test_store(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 0);
	mtree_test_erase(mt, 67167);
	mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_erase(mt, 67);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 667167);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_erase(mt, 67);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 67167, (void *)0x20cbf);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 7);
	mtree_test_insert(mt, 16, (void *)0x21);
	mtree_test_insert(mt, 36, (void *)0x49);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 367, (void *)0x2df);
	mtree_test_insert(mt, 115, (void *)0xe7);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store_range(mt, 1, 3, (void *)0x3);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 67167);
	mtree_test_insert_range(mt, 6, 47, (void *)0xd);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 1, 67, (void *)0x3);
	mtree_test_load(mt, 67);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 67167);
	mtree_destroy(mt);

	/*
	 * 9. A spanning store to the end of data caused an invalid metadata
	 * length which resulted in a crash eventually.
	 * Fix by checking if there is a value in pivot before incrementing the
	 * metadata end in mab_mas_cp(). To ensure this doesn't happen again,
	 * abstract the two locations this happens into a function called
	 * mas_leaf_set_meta().
	 */
	mt_init_flags(mt, 0);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 91, (void *)0xb7);
	mtree_test_insert(mt, 18, (void *)0x25);
	mtree_test_insert(mt, 81, (void *)0xa3);
	mtree_test_store_range(mt, 0, 128, (void *)0x1);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 8);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
	mtree_test_erase(mt, ULONG_MAX - 10);
	mtree_test_store_range(mt, 0, 281, (void *)0x1);
	mtree_test_erase(mt, 2);
	mtree_test_insert(mt, 1211, (void *)0x977);
	mtree_test_insert(mt, 111, (void *)0xdf);
	mtree_test_insert(mt, 13, (void *)0x1b);
	mtree_test_insert(mt, 211, (void *)0x1a7);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_insert(mt, 1218, (void *)0x985);
	mtree_test_insert(mt, 61, (void *)0x7b);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 121, (void *)0xf3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
	mtree_test_erase(mt, ULONG_MAX - 10);
}

/* duplicate the tree with a specific gap */
static noinline void __init check_dup_gaps(struct maple_tree *mt,
					   unsigned long nr_entries, bool zero_start,
					   unsigned long gap)
{
	unsigned long i = 0;
	struct maple_tree newmt;
	int ret;
	void *tmp;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, &newmt, 0, 0);

	if (!zero_start)
		i = 1;

	mt_zero_nr_tallocated();
	for (; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, (i+1)*10 - gap,
				  xa_mk_value(i), GFP_KERNEL);

	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	mt_set_non_kernel(99999);
	mas_lock(&newmas);
	ret = mas_expected_entries(&newmas, nr_entries);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, ret != 0);
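
	/*
	 * Bulk-load pattern: mas_expected_entries() preallocates nodes for
	 * the expected number of entries, the mas_store() calls below draw
	 * from that preallocation, and mas_destroy() returns whatever is
	 * left unused.
	 */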
	rcu_read_lock();
	mas_for_each(&mas, tmp, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store(&newmas, tmp);
	}
	rcu_read_unlock();
	mas_destroy(&newmas);
	mas_unlock(&newmas);

	mtree_destroy(&newmt);
}

/* Duplicate many sizes of trees. Mainly to test expected entry values */
static noinline void __init check_dup(struct maple_tree *mt)
{
	int i;
	int big_start = 100010;

	/* Check with a value at zero */
	for (i = 10; i < 1000; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, true, 5);
		mtree_destroy(mt);
	}

	cond_resched();
	/* Check with a value at zero, no gap */
	for (i = 1000; i < 2000; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, true, 0);
		mtree_destroy(mt);
	}

	cond_resched();
	/* Check with a value at zero and unreasonably large */
	for (i = big_start; i < big_start + 10; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, true, 5);
		mtree_destroy(mt);
	}

	cond_resched();
	/* Small to medium size not starting at zero */
	for (i = 200; i < 1000; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, false, 5);
		mtree_destroy(mt);
	}

	cond_resched();
	/* Unreasonably large not starting at zero */
	for (i = big_start; i < big_start + 10; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, false, 5);
		mtree_destroy(mt);
	}

	cond_resched();
	/* Check non-allocation tree not starting at zero */
	for (i = 1500; i < 3000; i++) {
		mt_init_flags(mt, 0);
		check_dup_gaps(mt, i, false, 5);
		mtree_destroy(mt);
	}

	cond_resched();
	/* Check non-allocation tree starting at zero */
	for (i = 200; i < 1000; i++) {
		mt_init_flags(mt, 0);
		check_dup_gaps(mt, i, true, 5);
		mtree_destroy(mt);
	}

	cond_resched();
	/* Unreasonably large */
	for (i = big_start + 5; i < big_start + 10; i++) {
		mt_init_flags(mt, 0);
		check_dup_gaps(mt, i, true, 5);
		mtree_destroy(mt);
	}
}

static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
{
	int i = 50;
	MA_STATE(mas, mt, 0, 0);

	mt_set_non_kernel(9999);
	mas_lock(&mas);
	do {
		mas_set_range(&mas, i*10, i*10+9);
		mas_store(&mas, check_bnode_min_spanning);
	} while (i--);
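
	/*
	 * A single spanning store over [240, 509] wipes many of the ranges
	 * stored above in one write, forcing a rebalance that must span a
	 * big node holding the minimum amount of data.
	 */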
	mas_set_range(&mas, 240, 509);
	mas_store(&mas, NULL);
	mas_unlock(&mas);
	mt_set_non_kernel(0);
}

static noinline void __init check_empty_area_window(struct maple_tree *mt)
{
	unsigned long i, nr_entries = 20;
	MA_STATE(mas, mt, 0, 0);

	for (i = 1; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 9,
				  xa_mk_value(i), GFP_KERNEL);
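
	/* Occupied: [10, 209] in twenty 10-slot ranges; only [0, 9] is free. */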

	/* Create another hole besides the one at 0 */
	mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);

	/* Check lower bounds that don't fit */
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);

	/* Check lower bound that does fit */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
	MT_BUG_ON(mt, mas.index != 5);
	MT_BUG_ON(mt, mas.last != 9);
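
	/*
	 * The reverse search returns the highest fit: with min 5 and size 5,
	 * the [0, 9] hole yields [5, 9].
	 */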

	/* Check one gap that doesn't fit and one that does */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
	MT_BUG_ON(mt, mas.index != 161);
	MT_BUG_ON(mt, mas.last != 169);

	/* Check one gap that does fit above the min */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
	MT_BUG_ON(mt, mas.index != 216);
	MT_BUG_ON(mt, mas.last != 218);
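
	/*
	 * Everything above the last entry (209) is empty, so a reverse
	 * search for 3 slots capped at 218 lands on [216, 218].
	 */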

	/* Check size that doesn't fit any gap */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);

	/*
	 * Check size that doesn't fit the lower end of the window but
	 * does fit the gap it overlaps
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);

	/*
	 * Check size that doesn't fit the upper end of the window but
	 * does fit the gap it overlaps
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);

	/* Check mas_empty_area forward */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 8);
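
	/*
	 * The forward search returns the lowest fit: 9 slots in the [0, 9]
	 * hole start at index 0.
	 */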
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 3);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EBUSY);

	mas_reset(&mas);
	mas_empty_area(&mas, 100, 165, 3);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
	rcu_read_unlock();
}

static noinline void __init check_empty_area_fill(struct maple_tree *mt)
{
	const unsigned long max = 0x25D78000;
	unsigned long size;
	int loop, shift;
	MA_STATE(mas, mt, 0, 0);

	mt_set_non_kernel(99999);
	for (shift = 12; shift <= 16; shift++) {
		loop = 5000;
		size = 1 << shift;
		while (loop--) {
			mas_set(&mas, 0);
			mas_lock(&mas);
			MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
			MT_BUG_ON(mt, mas.last != mas.index + size - 1);
			mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
			mas_unlock(&mas);
			mas_reset(&mas);
		}
	}
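
	/*
	 * Each pass above claims the lowest free gap of the current size and
	 * occupies it; 5000 passes at each of 0x1000..0x10000 sum to exactly
	 * max (0x25D78000), so the window below max is now completely full.
	 */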

	/* No space left. */
	size = 0x1000;
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
	rcu_read_unlock();

	/* Fill a depth 3 node to the maximum */
	for (unsigned long i = 629440511; i <= 629440800; i += 6)
		mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
	/* Make space in the second-last depth 4 node */
	mtree_erase(mt, 631668735);
	/* Make space in the last depth 4 node */
	mtree_erase(mt, 629506047);
	mas_reset(&mas);
	/* Search from just after the gap in the second-last depth 4 */
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
	rcu_read_unlock();
	mt_set_non_kernel(0);
}

static DEFINE_MTREE(tree);
static int __init maple_tree_seed(void)
{
	unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
				1001, 1002, 1003, 1005, 0,
				5003, 5002 };
	void *ptr = &set;

	pr_info("\nTEST STARTING\n\n");

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_root_expand(&tree);
	mtree_destroy(&tree);

#if defined(BENCH_SLOT_STORE)
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_slot_store(&tree);
	mtree_destroy(&tree);
#endif

#if defined(BENCH_NODE_STORE)
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_node_store(&tree);
	mtree_destroy(&tree);
#endif

#if defined(BENCH_AWALK)
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_awalk(&tree);
	mtree_destroy(&tree);
#endif

#if defined(BENCH_WALK)
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_walk(&tree);
	mtree_destroy(&tree);
#endif

#if defined(BENCH_FORK)
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_forking(&tree);
	mtree_destroy(&tree);
#endif

#if defined(BENCH_MT_FOR_EACH)
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mt_for_each(&tree);
	mtree_destroy(&tree);
#endif
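
	/*
	 * The benchmarks above are compile-time opt-in: enable the
	 * corresponding BENCH_* defines near the top of this file.
	 */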

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_iteration(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_forking(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_mas_store_gfp(&tree);
	mtree_destroy(&tree);

	/* Test ranges (store and insert) */
	mt_init_flags(&tree, 0);
	check_ranges(&tree);
	mtree_destroy(&tree);

#if defined(CONFIG_64BIT)
	/* These tests have ranges outside of 4GB */
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_range(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_rev_range(&tree);
	mtree_destroy(&tree);
#endif

	mt_init_flags(&tree, 0);
	check_load(&tree, set[0], NULL);	/* See if 5015 -> NULL */

	check_insert(&tree, set[9], &tree);	/* Insert 0 */
	check_load(&tree, set[9], &tree);	/* See if 0 -> &tree */
	check_load(&tree, set[0], NULL);	/* See if 5015 -> NULL */

	check_insert(&tree, set[10], ptr);	/* Insert 5003 */
	check_load(&tree, set[9], &tree);	/* See if 0 -> &tree */
	check_load(&tree, set[11], NULL);	/* See if 5002 -> NULL */
	check_load(&tree, set[10], ptr);	/* See if 5003 -> ptr */

	/* Clear out the tree */
	mtree_destroy(&tree);

	/* Try to insert, insert a dup, and load back what was inserted. */
	mt_init_flags(&tree, 0);
	check_insert(&tree, set[0], &tree);	/* Insert 5015 */
	check_dup_insert(&tree, set[0], &tree);	/* Insert 5015 again */
	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */

	/*
	 * The second set of tests tries to load a value that doesn't exist,
	 * inserts a second value, then loads the value again.
	 */
	check_load(&tree, set[1], NULL);	/* See if 5014 -> NULL */
	check_insert(&tree, set[1], ptr);	/* insert 5014 -> ptr */
	check_load(&tree, set[1], ptr);		/* See if 5014 -> ptr */
	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */
	/*
	 * Tree currently contains:
	 * p[0]: 14 -> (nil) p[1]: 15 -> ptr p[2]: 16 -> &tree p[3]: 0 -> (nil)
	 */
	check_insert(&tree, set[6], ptr);	/* insert 1002 -> ptr */
	check_insert(&tree, set[7], &tree);	/* insert 1003 -> &tree */

	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */
	check_load(&tree, set[1], ptr);		/* See if 5014 -> ptr */
	check_load(&tree, set[6], ptr);		/* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);	/* See if 1003 -> &tree */

	/* Clear out tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/* Test inserting into a NULL hole. */
	check_insert(&tree, set[5], ptr);	/* insert 1001 -> ptr */
	check_insert(&tree, set[7], &tree);	/* insert 1003 -> &tree */
	check_insert(&tree, set[6], ptr);	/* insert 1002 -> ptr */
	check_load(&tree, set[5], ptr);		/* See if 1001 -> ptr */
	check_load(&tree, set[6], ptr);		/* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);	/* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/*
	 * set[] = {5015, 5014, 5017, 25, 1000,
	 *	    1001, 1002, 1003, 1005, 0,
	 *	    5003, 5002};
	 */
	check_insert(&tree, set[0], ptr);	/* 5015 */
	check_insert(&tree, set[1], &tree);	/* 5014 */
	check_insert(&tree, set[2], ptr);	/* 5017 */
	check_insert(&tree, set[3], &tree);	/* 25 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_insert(&tree, set[4], ptr);	/* 1000 < Should split. */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);	/* 25 */
	check_load(&tree, set[4], ptr);
	check_insert(&tree, set[5], &tree);	/* 1001 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_insert(&tree, set[6], ptr);
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_insert(&tree, set[7], &tree);
	check_load(&tree, set[0], ptr);
	check_insert(&tree, set[8], ptr);
	check_insert(&tree, set[9], &tree);
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_load(&tree, set[9], &tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 16, false);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	check_lower_bound_split(&tree);
	check_upper_bound_split(&tree);
	check_mid_split(&tree);

	mt_init_flags(&tree, 0);
	check_next_entry(&tree);
	check_find(&tree);
	check_find_2(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_prev_entry(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_gap_combining(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_node_overwrite(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	next_prev_test(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_spanning_relatives(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_find(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_fuzzer(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_dup(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_bnode_min_spanning(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_window(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_fill(&tree);
	mtree_destroy(&tree);

	pr_info("maple_tree: %u of %u tests passed\n",
		atomic_read(&maple_tree_tests_passed),
		atomic_read(&maple_tree_tests_run));
	if (atomic_read(&maple_tree_tests_run) ==
	    atomic_read(&maple_tree_tests_passed))
		return 0;

	return -EINVAL;
}

static void __exit maple_tree_harvest(void)
{
}

module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
MODULE_LICENSE("GPL");