// SPDX-License-Identifier: GPL-2.0+
/*
 * test_maple_tree.c: Test the maple tree API
 * Copyright (c) 2018-2022 Oracle Corporation
 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
 *
 * Any tests that only require the interface of the tree.
 */

#include <linux/maple_tree.h>
#include <linux/module.h>

#define MTREE_ALLOC_MAX 0x2000000000000Ul
#ifndef CONFIG_DEBUG_MAPLE_TREE
#define CONFIG_DEBUG_MAPLE_TREE
#endif
#define CONFIG_MAPLE_SEARCH
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)

/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */
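
/*
 * Uncommenting one of the BENCH_* switches above compiles in the matching
 * micro-benchmark loop defined later in this file (bench_slot_store(),
 * bench_node_store(), bench_awalk(), bench_walk(), bench_mt_for_each(),
 * bench_forking()).
 */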

#ifdef __KERNEL__
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
#define cond_resched()			do {} while (0)
#endif
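
/*
 * The #ifdef above stubs out the userspace test-harness hooks for in-kernel
 * builds: the userspace build provides real mt_set_non_kernel() and
 * mt_zero_nr_tallocated() implementations, while cond_resched() only exists
 * in the kernel.
 */
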
static
int mtree_insert_index(struct maple_tree *mt, unsigned long index, gfp_t gfp)
{
	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
}

static void mtree_erase_index(struct maple_tree *mt, unsigned long index)
{
	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
}

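/*
 * Thin wrappers around the mtree_*() API that pin GFP_KERNEL so the call
 * sites below stay terse.  A minimal sketch of the pattern they exercise
 * (illustrative only, not part of the test run):
 *
 *	DEFINE_MTREE(tree);
 *
 *	mtree_store_range(&tree, 10, 20, xa_mk_value(10), GFP_KERNEL);
 *	MT_BUG_ON(&tree, mtree_load(&tree, 15) != xa_mk_value(10));
 *	mtree_destroy(&tree);
 */
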
static int mtree_test_insert(struct maple_tree *mt, unsigned long index,
				void *ptr)
{
	return mtree_insert(mt, index, ptr, GFP_KERNEL);
}

static int mtree_test_store_range(struct maple_tree *mt, unsigned long start,
				unsigned long end, void *ptr)
{
	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
}

static int mtree_test_store(struct maple_tree *mt, unsigned long start,
				void *ptr)
{
	return mtree_test_store_range(mt, start, start, ptr);
}

static int mtree_test_insert_range(struct maple_tree *mt, unsigned long start,
				unsigned long end, void *ptr)
{
	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
}

static void *mtree_test_load(struct maple_tree *mt, unsigned long index)
{
	return mtree_load(mt, index);
}

static void *mtree_test_erase(struct maple_tree *mt, unsigned long index)
{
	return mtree_erase(mt, index);
}

#if defined(CONFIG_64BIT)
static noinline void check_mtree_alloc_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{
	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}

static noinline void check_mtree_alloc_rrange(struct maple_tree *mt,
		unsigned long start, unsigned long end, unsigned long size,
		unsigned long expected, int eret, void *ptr)
{
	unsigned long result = expected + 1;
	int ret;

	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end - 1,
			GFP_KERNEL);
	MT_BUG_ON(mt, ret != eret);
	if (ret)
		return;

	MT_BUG_ON(mt, result != expected);
}
#endif
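
/*
 * check_mtree_alloc_range() and check_mtree_alloc_rrange() drive the forward
 * and reverse allocators: @eret is the expected return code and, on success,
 * @expected is the index the allocation must land on.  Seeding result with
 * expected + 1 ensures the comparison cannot pass if the allocator never
 * wrote it.  A minimal sketch of the call they wrap (illustrative only):
 *
 *	unsigned long where;
 *
 *	if (!mtree_alloc_range(mt, &where, ptr, 0x1000, 0, 0x2000, GFP_KERNEL))
 *		;	// ptr now occupies [where, where + 0xfff]
 */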

static noinline void check_load(struct maple_tree *mt, unsigned long index,
				void *ptr)
{
	void *ret = mtree_test_load(mt, index);

	if (ret != ptr)
		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
	MT_BUG_ON(mt, ret != ptr);
}

static noinline void check_store_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_store_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void check_insert_range(struct maple_tree *mt,
		unsigned long start, unsigned long end, void *ptr, int expected)
{
	int ret = -EINVAL;
	unsigned long i;

	ret = mtree_test_insert_range(mt, start, end, ptr);
	MT_BUG_ON(mt, ret != expected);

	if (ret)
		return;

	for (i = start; i <= end; i++)
		check_load(mt, i, ptr);
}

static noinline void check_insert(struct maple_tree *mt, unsigned long index,
		void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != 0);
}

static noinline void check_dup_insert(struct maple_tree *mt,
				      unsigned long index, void *ptr)
{
	int ret = -EINVAL;

	ret = mtree_test_insert(mt, index, ptr);
	MT_BUG_ON(mt, ret != -EEXIST);
}

static noinline
void check_index_load(struct maple_tree *mt, unsigned long index)
{
	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
}

static inline int not_empty(struct maple_node *node)
{
	int i;

	if (node->parent)
		return 1;

	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
		if (node->slot[i])
			return 1;

	return 0;
}

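/*
 * check_rev_seq() - build the tree from @max down to 1, one index at a time,
 * verifying every already-stored index and the still-empty slot below the
 * insertion point after each step.
 */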
static noinline void check_rev_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i = max, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	while (i) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = i; j <= max; j++)
			check_index_load(mt, j);

		check_load(mt, i - 1, NULL);
		mt_set_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		mt_clear_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		i--;
	}
	check_load(mt, max + 1, NULL);

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt);
		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
			__func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

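/*
 * check_seq() - ascending counterpart of check_rev_seq(): insert 0..@max in
 * order, re-verifying every previously stored index after each insertion.
 */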
static noinline void check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	for (i = 0; i <= max; i++) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		for (j = 0; j <= i; j++)
			check_index_load(mt, j);

		if (i)
			MT_BUG_ON(mt, !mt_height(mt));
		check_load(mt, i + 1, NULL);
	}

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt);
		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}

static noinline void check_lb_not_empty(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge = 4000UL * 1000 * 1000;

	i = huge;
	while (i > 4096) {
		check_insert(mt, i, (void *) i);
		for (j = huge; j >= i; j /= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i /= 2;
	}
	mtree_destroy(mt);
}

static noinline void check_lower_bound_split(struct maple_tree *mt)
{
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_lb_not_empty(mt);
}

static noinline void check_upper_bound_split(struct maple_tree *mt)
{
	unsigned long i, j;
	unsigned long huge;

	MT_BUG_ON(mt, !mtree_empty(mt));

	if (MAPLE_32BIT)
		huge = 2147483647UL;
	else
		huge = 4000UL * 1000 * 1000;

	i = 4096;
	while (i < huge) {
		check_insert(mt, i, (void *) i);
		/* Check each inserted power of two from i down to 4096. */
		for (j = i; j >= 4096; j /= 2) {
			check_load(mt, j-1, NULL);
			check_load(mt, j, (void *) j);
			check_load(mt, j+1, NULL);
		}
		i *= 2;
	}
	mtree_destroy(mt);
}

static noinline void check_mid_split(struct maple_tree *mt)
{
	unsigned long huge = 8000UL * 1000 * 1000;

	check_insert(mt, huge, (void *) huge);
	check_insert(mt, 0, xa_mk_value(0));
	check_lb_not_empty(mt);
}

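/*
 * check_rev_find() - store the ranges [i * 10, i * 10 + 5] for i = 0..200,
 * then walk backwards with mas_find_rev(); e.g. starting at index 1000 the
 * first hit is the range 1000-1005, i.e. xa_mk_value(100).
 */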
static noinline void check_rev_find(struct maple_tree *mt)
{
	int i, nr_entries = 200;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	rcu_read_lock();
	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 999);
	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	mas_set(&mas, 20);
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}

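/*
 * check_find() - populate indices growing by powers of four (1, 4, 16, ...)
 * with XA_ZERO_ENTRY planted at index 64, then exercise mas_find(),
 * mas_for_each(), mas_pause(), mt_for_each(), mt_find() and mas_prev() over
 * the result.
 */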
static noinline void check_find(struct maple_tree *mt)
{
	unsigned long val = 0;
	unsigned long count;
	unsigned long max;
	unsigned long top;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	/* Insert 0. */
	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;
#else
	top = ULONG_MAX;
#endif

	if (MAPLE_32BIT) {
		count = 15;
	} else {
		count = 20;
	}

	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
				XA_ZERO_ENTRY, GFP_KERNEL));

		val <<= 2;
	}

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Test mas_pause */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;

		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	val = 0;
	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		val <<= 2;
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}

	val = 0;
	max = 0;
	index = 0;
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
		max++;
		MT_BUG_ON(mt, max > 25);
	}
	mtree_erase_index(mt, ULONG_MAX);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);

	/* Range check.. */
	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	val = 0;
	mas_set(&mas, 0);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
		if (!val)
			val = 1;
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	mas_set(&mas, 1048576);
	mas_lock(&mas);
	entry = mas_find(&mas, 1048576);
	mas_unlock(&mas);
	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * Find the last value.
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete the end entry
	 * 3. find the last value by searching for ULONG_MAX and then using
	 * prev
	 */
	/* First, get the expected result. */
	mas_lock(&mas);
	mas_reset(&mas);
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas_reset(&mas);
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	mas_erase(&mas);

	/* Get the previous value from MAS_START */
	mas_reset(&mas);
	entry2 = mas_prev(&mas, 0);

	/* Check results. */
	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);

	mas.node = MAS_NONE;
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	mas_set(&mas, 0);
	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);

	mas_unlock(&mas);
	mtree_destroy(mt);
}

static noinline void check_find_2(struct maple_tree *mt)
{
	unsigned long i, j;
	void *entry;

	MA_STATE(mas, mt, 0, 0);
	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		MT_BUG_ON(mt, true);
	rcu_read_unlock();

	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);
		j = 0;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != i + 1);
	}

	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);
		j = i + 1;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != 256);
	}

	/*MT_BUG_ON(mt, !mtree_empty(mt)); */
}

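/*
 * check_alloc_rev_range() - replay a recorded process address-space layout
 * (shifted down by 12 bits) and verify that reverse gap searches and
 * mtree_alloc_rrange() land in the expected holes.
 */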
#if defined(CONFIG_64BIT)
static noinline void check_alloc_rev_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	unsigned long holes[] = {
		/*
		 * Note: start of hole is INCLUSIVE
		 *       end of hole is EXCLUSIVE
		 *       (opposite of the above table.)
		 * Start of hole, end of hole, size of hole (+1)
		 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};
	/*
	 * req_range consists of 5 values per request:
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned
	 * 5. return value
	 */
	unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x7fff5878d << 12,  /* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x565234AF1 << 12,  /* Max */
		0x3000,         /* Size */
		0x565234AEE << 12,  /* max - 3. */
		0,              /* Return value success. */

		0x0,            /* Min */
		-1,             /* Max */
		0x1000,         /* Size */
		562949953421311 << 12,/* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7F36D510A << 12,  /* Max */
		0x4000,         /* Size */
		0x7F36D5106 << 12,  /* First rev hole of size 0x4000 */
		0,              /* Return value success. */

		/* Ascend test. */
		0x0,
		34148798629 << 12,
		19 << 12,
		34148797418 << 12,
		0x0,

		/* Too big test. */
		0x0,
		18446744073709551615UL,
		562915594369134UL << 12,
		0x0,
		-EBUSY,

	};

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */

#if DEBUG_REV_RANGE
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
				(range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}

	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_REV_RANGE
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
				min, holes[i+1]>>12, holes[i+2]>>12,
				holes[i] >> 12);
#endif
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
#if DEBUG_REV_RANGE
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
				(holes[i+1] >> 12));
#endif
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
		mas_reset(&mas);
	}

	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_REV_RANGE
		pr_debug("\tReverse request between %lu-%lu size %lu, should get %lu\n",
				req_range[i] >> 12,
				(req_range[i + 1] >> 12) - 1,
				req_range[i+2] >> 12,
				req_range[i+3] >> 12);
#endif
		check_mtree_alloc_rrange(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
			34148798725, 0, mt);

	mtree_destroy(mt);
}

static noinline void check_alloc_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps|awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};
	unsigned long holes[] = {
		/* Start of hole, end of hole, size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values per request:
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned
	 * 5. return value
	 */
	unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x565234afb000, /* First hole in our data of size 0x1000. */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7fff58791000, /* Max */
		0x1F00,         /* Size */
		0x0,            /* First hole in our data large enough (>= 0x1F00). */
		0,              /* Return value success. */

		/* Test ascend. */
		34148797436 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x3000,            /* Size */
		34148798629 << 12, /* Expected location */
		0,                 /* Return value success. */

		/* Test failing. */
		34148798623 << 12, /* Min */
		34148798683 << 12, /* Max */
		0x15000,           /* Size */
		0,                 /* Expected location */
		-EBUSY,            /* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x10000,           /* Size */
		34148798632 << 12, /* Expected location */
		0,                 /* Return value success. */

		/* Test walking off the end of root. */
		0,                 /* Min */
		-1,                /* Max */
		-1,                /* Size */
		0,                 /* Expected location */
		-EBUSY,            /* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		0,                 /* Min */
		-1,                /* Max */
		4503599618982063UL << 12,  /* Size */
		34359052178 << 12, /* Expected location */
		-EBUSY,            /* Return failure. */
	};
	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;
	MA_STATE(mas, mt, 0, 0);

	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
		mt_dump(mt);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}

	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {

#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
			holes[i+1] >> 12, holes[i+2] >> 12,
			min, holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
		mas_reset(&mas);
	}
	mas_unlock(&mas);
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
			 i/5, req_range[i]   >> 12, req_range[i + 1] >> 12,
			 req_range[i + 2] >> 12, req_range[i + 3] >> 12,
			 req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
#if DEBUG_ALLOC_RANGE
		mt_dump(mt);
#endif
	}

	mtree_destroy(mt);
}
#endif

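/*
 * check_ranges() - exercise range insert/store: overlap rejection,
 * overwrites spanning node boundaries and multiple levels, rebalance-forcing
 * erases, and splits that propagate all the way up to the root.
 */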
static noinline void check_ranges(struct maple_tree *mt)
{
	int i, val, val2;
	unsigned long r[] = {
		10, 15,
		20, 25,
		17, 22, /* Overlaps previous range. */
		9, 1000, /* Huge. */
		100, 200,
		45, 168,
		118, 128,
	};

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
	MT_BUG_ON(mt, !mt_height(mt));
	/* Store */
	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	MT_BUG_ON(mt, mt_height(mt));

	check_seq(mt, 50, false);
	mt_set_non_kernel(4);
	check_store_range(mt, 5, 47, xa_mk_value(47), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 1-100 */
	check_seq(mt, 100, false);
	/* Store 45-168 */
	mt_set_non_kernel(10);
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Create tree of 1-200 */
	check_seq(mt, 200, false);
	/* Store 45-168 */
	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	check_seq(mt, 30, false);
	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite across multiple levels. */
	/* Create tree of 1-400 */
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	/* Store 118-128 */
	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
	mt_set_non_kernel(50);
	mtree_test_erase(mt, 140);
	mtree_test_erase(mt, 141);
	mtree_test_erase(mt, 142);
	mtree_test_erase(mt, 143);
	mtree_test_erase(mt, 130);
	mtree_test_erase(mt, 131);
	mtree_test_erase(mt, 132);
	mtree_test_erase(mt, 133);
	mtree_test_erase(mt, 134);
	mtree_test_erase(mt, 135);
	check_load(mt, r[12], xa_mk_value(r[12]));
	check_load(mt, r[13], xa_mk_value(r[12]));
	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
	check_load(mt, 135, NULL);
	check_load(mt, 140, NULL);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Overwrite multiple levels at the end of the tree (slot 7) */
	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	check_store_range(mt, 347, 352, xa_mk_value(347), 0);

	check_load(mt, 346, xa_mk_value(346));
	for (i = 347; i <= 352; i++)
		check_load(mt, i, xa_mk_value(347));
	for (i = 353; i <= 361; i++)
		check_load(mt, i, xa_mk_value(353));
	check_load(mt, 362, xa_mk_value(362));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 363; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 364, NULL);
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(5);
	check_seq(mt, 400, false);
	check_store_range(mt, 352, 364, NULL, 0);
	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
	check_load(mt, 350, xa_mk_value(350));
	check_load(mt, 351, xa_mk_value(352));
	for (i = 352; i <= 364; i++)
		check_load(mt, i, xa_mk_value(352));
	check_load(mt, 365, xa_mk_value(365));
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_set_non_kernel(50);
	check_seq(mt, 400, false);
	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
	mt_set_non_kernel(0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);
	/*
	 * Interesting cases:
	 * 1. Overwrite the end of a node and end in the first entry of the
	 *    next node.
	 * 2. Split a single range.
	 * 3. Overwrite the start of a range.
	 * 4. Overwrite the end of a range.
	 * 5. Overwrite the entire range.
	 * 6. Overwrite a range that causes multiple parent nodes to be
	 *    combined.
	 * 7. Overwrite a range that causes multiple parent nodes and part of
	 *    the root to be combined.
	 * 8. Overwrite the whole tree.
	 * 9. Try to overwrite the zero entry of an alloc tree.
	 * 10. Write a range larger than a node's current pivot.
	 */

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
	check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
	check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
	check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
	check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
	mtree_destroy(mt);
	mt_set_non_kernel(0);

	mt_set_non_kernel(50);
	for (i = 0; i <= 500; i++) {
		val = i*5;
		val2 = (i+1)*5;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
	check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
	check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
	check_store_range(mt, 2460, 2470, NULL, 0);
	check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
	check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	/* Test rebalance gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mt_set_non_kernel(50);
	for (i = 0; i <= 50; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 161, 161, xa_mk_value(161), 0);
	check_store_range(mt, 162, 162, xa_mk_value(162), 0);
	check_store_range(mt, 163, 163, xa_mk_value(163), 0);
	check_store_range(mt, 240, 249, NULL, 0);
	mtree_erase(mt, 200);
	mtree_erase(mt, 210);
	mtree_erase(mt, 220);
	mtree_erase(mt, 230);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 500; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
	}
	check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
	check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
	check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
	check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
	check_store_range(mt, 4842, 4849, NULL, 0);
	mt_validate(mt);
	MT_BUG_ON(mt, !mt_height(mt));
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1300; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Cause a 3 child split all the way up the tree. */
	for (i = 5; i < 215; i += 10)
		check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
	for (i = 5; i < 65; i += 10)
		check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);

	MT_BUG_ON(mt, mt_height(mt) >= 4);
	for (i = 5; i < 45; i += 10)
		check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) < 4);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	for (i = 0; i <= 1200; i++) {
		val = i*10;
		val2 = (i+1)*10;
		check_store_range(mt, val, val2, xa_mk_value(val), 0);
		MT_BUG_ON(mt, mt_height(mt) >= 4);
	}
	/* Fill parents and leaves before split. */
	for (i = 5; i < 455; i += 10)
		check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);

	for (i = 1; i < 16; i++)
		check_store_range(mt, 8185 + i, 8185 + i + 1,
				  xa_mk_value(8185+i), 0);
	MT_BUG_ON(mt, mt_height(mt) >= 4);
	/* triple split across multiple levels. */
	check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
	if (!MAPLE_32BIT)
		MT_BUG_ON(mt, mt_height(mt) != 4);
}

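/*
 * check_next_entry() - seed 0..30 with check_seq(), walk to the first entry,
 * then verify mas_next() returns each successive value and NULL once the
 * limit is passed.
 */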
static noinline void check_next_entry(struct maple_tree *mt)
{
	void *entry = NULL;
	unsigned long limit = 30, i = 0;
	MA_STATE(mas, mt, i, i);

	MT_BUG_ON(mt, !mtree_empty(mt));

	check_seq(mt, limit, false);
	rcu_read_lock();

	/* Check the first one and get ma_state in the correct state. */
	MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
	for ( ; i <= limit + 1; i++) {
		entry = mas_next(&mas, limit);
		if (i > limit)
			MT_BUG_ON(mt, entry != NULL);
		else
			MT_BUG_ON(mt, xa_mk_value(i) != entry);
	}
	rcu_read_unlock();
	mtree_destroy(mt);
}

static noinline void check_prev_entry(struct maple_tree *mt)
{
	unsigned long index = 16;
	void *value;
	int i;

	MA_STATE(mas, mt, index, index);

	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, 30, false);

	rcu_read_lock();
	value = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, value != xa_mk_value(index));
	value = mas_prev(&mas, 0);
	MT_BUG_ON(mt, value != xa_mk_value(index - 1));
	rcu_read_unlock();
	mtree_destroy(mt);

	/* Check limits on prev */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	for (i = 0; i <= index; i++) {
		mas_set_range(&mas, i*10, i*10+5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	mas_set(&mas, 20);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(2));

	value = mas_prev(&mas, 19);
	MT_BUG_ON(mt, value != NULL);

	mas_set(&mas, 80);
	value = mas_walk(&mas);
	MT_BUG_ON(mt, value != xa_mk_value(8));

	value = mas_prev(&mas, 76);
	MT_BUG_ON(mt, value != NULL);

	mas_unlock(&mas);
}

static noinline void check_root_expand(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *ptr;

	mas_lock(&mas);
	mas_set(&mas, 3);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	ptr = &check_prev_entry;
	mas_set(&mas, 1);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 0);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	mas_set(&mas, 1);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);

	mas_set(&mas, 2);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = &check_prev_entry;
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 5);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	mas_set_range(&mas, 0, 100);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);
	MT_BUG_ON(mt, mas.last != 0);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 1UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));

	mas_unlock(&mas);

	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);
	mas_set(&mas, 0);
	ptr = (void *)((unsigned long) check_prev_entry | 2UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));

	mas_unlock(&mas);
}

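/*
 * check_gap_combining() - punch holes into densely packed trees and verify
 * that adjacent empty slots combine into one gap which mas_empty_area_rev()
 * can find, at one, two and more levels (seq100, seq2000, seq400 data sets).
 */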
static noinline void check_gap_combining(struct maple_tree *mt)
{
	struct maple_enode *mn1, *mn2;
	void *entry;
	unsigned long singletons = 100;
	unsigned long *seq100;
	unsigned long seq100_64[] = {
		/* 0-5 */
		74, 75, 76,
		50, 100, 2,

		/* 6-12 */
		44, 45, 46, 43,
		20, 50, 3,

		/* 13-20*/
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	unsigned long seq100_32[] = {
		/* 0-5 */
		61, 62, 63,
		50, 100, 2,

		/* 6-12 */
		31, 32, 33, 30,
		20, 50, 3,

		/* 13-20*/
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	unsigned long seq2000[] = {
		1152, 1151,
		1100, 1200, 2,
	};
	unsigned long seq400[] = {
		286, 318,
		256, 260, 266, 270, 275, 280, 290, 398,
		286, 310,
	};

	unsigned long index;

	MA_STATE(mas, mt, 0, 0);

	if (MAPLE_32BIT)
		seq100 = seq100_32;
	else
		seq100 = seq100_64;

	index = seq100[0];
	mas_set(&mas, index);
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, singletons, false); /* create 100 singletons. */

	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq100[2]);
	check_load(mt, seq100[2], NULL);
	mtree_test_erase(mt, seq100[1]);
	check_load(mt, seq100[1], NULL);

	rcu_read_lock();
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	mas_next(&mas, ULONG_MAX);
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the test. */

	/*
	 * At this point, there is a gap of 2 at index + 1 between seq100[3]
	 * and seq100[4]. Search for the gap.
	 */
	mt_set_non_kernel(1);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
					     seq100[5]));
	MT_BUG_ON(mt, mas.index != index + 1);
	rcu_read_unlock();

	mtree_test_erase(mt, seq100[6]);
	check_load(mt, seq100[6], NULL);
	mtree_test_erase(mt, seq100[7]);
	check_load(mt, seq100[7], NULL);
	mtree_test_erase(mt, seq100[8]);
	index = seq100[9];

	rcu_read_lock();
	mas.index = index;
	mas.last = index;
	mas_reset(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mas_next(&mas, ULONG_MAX); /* go to the next entry. */
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */

	/*
	 * At this point, there is a gap of 3 at seq100[6].  Find it by
	 * searching 20 - 50 for size 3.
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
					     seq100[12]));
	MT_BUG_ON(mt, mas.index != seq100[6]);
	rcu_read_unlock();

	mt_set_non_kernel(1);
	mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], xa_mk_value(seq100[14]));
	mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], NULL);

	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
					     seq100[17]));
	MT_BUG_ON(mt, mas.index != seq100[13]);
	mt_validate(mt);
	rcu_read_unlock();

	/*
	 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
	 * gap.
	 */
	mt_set_non_kernel(2);
	mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
	mtree_test_erase(mt, seq100[15]);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
					     seq100[20]));
	rcu_read_unlock();
	MT_BUG_ON(mt, mas.index != seq100[18]);
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 2000 tests are for multi-level tree gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 2000, false);
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq2000[0]);
	mtree_test_erase(mt, seq2000[1]);

	mt_set_non_kernel(2);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
					     seq2000[4]));
	MT_BUG_ON(mt, mas.index != seq2000[1]);
	rcu_read_unlock();
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 400 tests rebalancing over two levels. */
	mt_set_non_kernel(99);
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	mtree_test_store_range(mt, seq400[2], seq400[9],
			       xa_mk_value(seq400[2]));
	mtree_test_store_range(mt, seq400[3], seq400[9],
			       xa_mk_value(seq400[3]));
	mtree_test_store_range(mt, seq400[4], seq400[9],
			       xa_mk_value(seq400[4]));
	mtree_test_store_range(mt, seq400[5], seq400[9],
			       xa_mk_value(seq400[5]));
	mtree_test_store_range(mt, seq400[0], seq400[9],
			       xa_mk_value(seq400[0]));
	mtree_test_store_range(mt, seq400[6], seq400[9],
			       xa_mk_value(seq400[6]));
	mtree_test_store_range(mt, seq400[7], seq400[9],
			       xa_mk_value(seq400[7]));
	mtree_test_store_range(mt, seq400[8], seq400[9],
			       xa_mk_value(seq400[8]));
	mtree_test_store_range(mt, seq400[10], seq400[11],
			       xa_mk_value(seq400[10]));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);
}
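
/*
 * check_node_overwrite() - store 4000 disjoint ranges, then overwrite a
 * large span (319951-367950) with NULL in a single store and validate the
 * resulting tree.
 */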
static noinline void check_node_overwrite(struct maple_tree *mt)
{
	int i, max = 4000;

	for (i = 0; i < max; i++)
		mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));

	mtree_test_store_range(mt, 319951, 367950, NULL);
	/*mt_dump(mt); */
	mt_validate(mt);
}

#if defined(BENCH_SLOT_STORE)
static noinline void bench_slot_store(struct maple_tree *mt)
{
	int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
		mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
				  GFP_KERNEL);
	}
}
#endif

#if defined(BENCH_NODE_STORE)
static noinline void bench_node_store(struct maple_tree *mt)
{
	int i, overwrite = 76, max = 240, count = 20000000;

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mtree_store_range(mt, overwrite, overwrite + 15,
				  xa_mk_value(overwrite), GFP_KERNEL);

		overwrite += 5;
		if (overwrite >= 135)
			overwrite = 76;
	}
}
#endif

#if defined(BENCH_AWALK)
static noinline void bench_awalk(struct maple_tree *mt)
{
	int i, max = 2500, count = 50000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_empty_area_rev(&mas, 0, 2000, 10);
		mas_reset(&mas);
	}
}
#endif
#if defined(BENCH_WALK)
static noinline void bench_walk(struct maple_tree *mt)
{
	int i, max = 2500, count = 550000000;
	MA_STATE(mas, mt, 1470, 1470);

	for (i = 0; i < max; i += 10)
		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		mas_walk(&mas);
		mas_reset(&mas);
	}

}
#endif

#if defined(BENCH_MT_FOR_EACH)
static noinline void bench_mt_for_each(struct maple_tree *mt)
{
	int i, count = 1000000;
	unsigned long max = 2500, index = 0;
	void *entry;

	for (i = 0; i < max; i += 5)
		mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < count; i++) {
		unsigned long j = 0;

		mt_for_each(mt, entry, index, max) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j += 5;
		}

		index = 0;
	}

}
#endif

/* check_forking - simulate the kernel forking sequence with the tree. */
static noinline void check_forking(struct maple_tree *mt)
{

	struct maple_tree newmt;
	int i, nr_entries = 134;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);
	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	newmas.tree = &newmt;
	mas_reset(&newmas);
	mas_reset(&mas);
	mas_lock(&newmas);
	mas.index = 0;
	mas.last = 0;
	if (mas_expected_entries(&newmas, nr_entries)) {
		pr_err("OOM!");
		BUG_ON(1);
	}
	rcu_read_lock();
	mas_for_each(&mas, val, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store(&newmas, val);
	}
	rcu_read_unlock();
	mas_destroy(&newmas);
	mas_unlock(&newmas);
	mt_validate(&newmt);
	mt_set_non_kernel(0);
	mtree_destroy(&newmt);
}

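/*
 * check_mas_store_gfp() - copy a populated tree into a fresh one using
 * mas_store_gfp() for every entry, then validate the copy.
 */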
static noinline void check_mas_store_gfp(struct maple_tree *mt)
{

	struct maple_tree newmt;
	int i, nr_entries = 135;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);
	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	newmas.tree = &newmt;
	rcu_read_lock();
	mas_lock(&newmas);
	mas_reset(&newmas);
	mas_set(&mas, 0);
	mas_for_each(&mas, val, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store_gfp(&newmas, val, GFP_KERNEL);
	}
	mas_unlock(&newmas);
	rcu_read_unlock();
	mt_validate(&newmt);
	mt_set_non_kernel(0);
	mtree_destroy(&newmt);
}

#if defined(BENCH_FORK)
static noinline void bench_forking(struct maple_tree *mt)
{

	struct maple_tree newmt;
	int i, nr_entries = 134, nr_fork = 80000;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	for (i = 0; i < nr_fork; i++) {
		mt_set_non_kernel(99999);
		mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
		newmas.tree = &newmt;
		mas_reset(&newmas);
		mas_reset(&mas);
		mas.index = 0;
		mas.last = 0;
		rcu_read_lock();
		mas_lock(&newmas);
		if (mas_expected_entries(&newmas, nr_entries)) {
			printk("OOM!");
			BUG_ON(1);
		}
		mas_for_each(&mas, val, ULONG_MAX) {
			newmas.index = mas.index;
			newmas.last = mas.last;
			mas_store(&newmas, val);
		}
		mas_destroy(&newmas);
		mas_unlock(&newmas);
		rcu_read_unlock();
		mt_validate(&newmt);
		mt_set_non_kernel(0);
		mtree_destroy(&newmt);
	}
}
#endif

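/*
 * next_prev_test() - walk mas_next()/mas_prev() across range and node
 * boundaries, across two levels of the tree, and off either end of the tree
 * and back on.
 */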
static noinline void next_prev_test(struct maple_tree *mt)
{
	int i, nr_entries;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	struct maple_enode *mn;
	unsigned long *level2;
	unsigned long level2_64[] = {707, 1000, 710, 715, 720, 725};
	unsigned long level2_32[] = {1747, 2000, 1750, 1755, 1760, 1765};

	if (MAPLE_32BIT) {
		nr_entries = 500;
		level2 = level2_32;
	} else {
		nr_entries = 200;
		level2 = level2_64;
	}

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mas_lock(&mas);
	for (i = 0; i <= nr_entries / 2; i++) {
		mas_next(&mas, 1000);
		if (mas_is_none(&mas))
			break;

	}
	mas_reset(&mas);
	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 1000) {
		i++;
	}

	mas_reset(&mas);
	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 1000) {
		mas_pause(&mas);
		i++;
	}

	/*
	 * 680 - 685 = xa_mk_value(68)
	 * 686 - 689 = NULL
	 * 690 - 695 = xa_mk_value(69)
	 * Check simple next/prev
	 */
1837 mas_set(&mas, 686);
1838 val = mas_walk(&mas);
1839 MT_BUG_ON(mt, val != NULL);
1840
1841 val = mas_next(&mas, 1000);
1842 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
1843 MT_BUG_ON(mt, mas.index != 690);
1844 MT_BUG_ON(mt, mas.last != 695);
1845
1846 val = mas_prev(&mas, 0);
1847 MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
1848 MT_BUG_ON(mt, mas.index != 680);
1849 MT_BUG_ON(mt, mas.last != 685);
1850
1851 val = mas_next(&mas, 1000);
1852 MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
1853 MT_BUG_ON(mt, mas.index != 690);
1854 MT_BUG_ON(mt, mas.last != 695);
1855
1856 val = mas_next(&mas, 1000);
1857 MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
1858 MT_BUG_ON(mt, mas.index != 700);
1859 MT_BUG_ON(mt, mas.last != 705);
1860
1861 /* Check across node boundaries of the tree */
1862 mas_set(&mas, 70);
1863 val = mas_walk(&mas);
1864 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
1865 MT_BUG_ON(mt, mas.index != 70);
1866 MT_BUG_ON(mt, mas.last != 75);
1867
1868 val = mas_next(&mas, 1000);
1869 MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
1870 MT_BUG_ON(mt, mas.index != 80);
1871 MT_BUG_ON(mt, mas.last != 85);
1872
1873 val = mas_prev(&mas, 70);
1874 MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
1875 MT_BUG_ON(mt, mas.index != 70);
1876 MT_BUG_ON(mt, mas.last != 75);
1877
1878 /* Check across two levels of the tree */
1879 mas_reset(&mas);
1880 mas_set(&mas, level2[0]);
1881 val = mas_walk(&mas);
1882 MT_BUG_ON(mt, val != NULL);
1883 val = mas_next(&mas, level2[1]);
1884 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
1885 MT_BUG_ON(mt, mas.index != level2[2]);
1886 MT_BUG_ON(mt, mas.last != level2[3]);
1887 mn = mas.node;
1888
1889 val = mas_next(&mas, level2[1]);
1890 MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
1891 MT_BUG_ON(mt, mas.index != level2[4]);
1892 MT_BUG_ON(mt, mas.last != level2[5]);
1893 MT_BUG_ON(mt, mn == mas.node);
1894
1895 val = mas_prev(&mas, 0);
1896 MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
1897 MT_BUG_ON(mt, mas.index != level2[2]);
1898 MT_BUG_ON(mt, mas.last != level2[3]);
1899
1900 /* Check running off the end and back on */
1901 mas_set(&mas, nr_entries * 10);
1902 val = mas_walk(&mas);
1903 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
1904 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
1905 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
1906
1907 val = mas_next(&mas, ULONG_MAX);
1908 MT_BUG_ON(mt, val != NULL);
1909 MT_BUG_ON(mt, mas.index != ULONG_MAX);
1910 MT_BUG_ON(mt, mas.last != ULONG_MAX);
1911
1912 val = mas_prev(&mas, 0);
1913 MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
1914 MT_BUG_ON(mt, mas.index != (nr_entries * 10));
1915 MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
1916
1917 /* Check running off the start and back on */
1918 mas_reset(&mas);
1919 mas_set(&mas, 10);
1920 val = mas_walk(&mas);
1921 MT_BUG_ON(mt, val != xa_mk_value(1));
1922 MT_BUG_ON(mt, mas.index != 10);
1923 MT_BUG_ON(mt, mas.last != 15);
1924
1925 val = mas_prev(&mas, 0);
1926 MT_BUG_ON(mt, val != xa_mk_value(0));
1927 MT_BUG_ON(mt, mas.index != 0);
1928 MT_BUG_ON(mt, mas.last != 5);
1929
1930 val = mas_prev(&mas, 0);
1931 MT_BUG_ON(mt, val != NULL);
1932 MT_BUG_ON(mt, mas.index != 0);
1933 MT_BUG_ON(mt, mas.last != 0);
1934
1935 mas.index = 0;
1936 mas.last = 5;
1937 mas_store(&mas, NULL);
1938 mas_reset(&mas);
1939 mas_set(&mas, 10);
1940 mas_walk(&mas);
1941
1942 val = mas_prev(&mas, 0);
1943 MT_BUG_ON(mt, val != NULL);
1944 MT_BUG_ON(mt, mas.index != 0);
1945 MT_BUG_ON(mt, mas.last != 0);
1946 mas_unlock(&mas);
1947
1948 mtree_destroy(mt);
1949
1950 mt_init(mt);
1951 mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
1952 mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
1953 rcu_read_lock();
1954 mas_set(&mas, 5);
1955 val = mas_prev(&mas, 4);
1956 MT_BUG_ON(mt, val != NULL);
1957 rcu_read_unlock();
1958 }
1959
1960
1961
1962 /* Test spanning writes that require balancing right sibling or right cousin */
static noinline void check_spanning_relatives(struct maple_tree *mt)
{
	unsigned long i, nr_entries = 1000;

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

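	/*
	 * Entries were stored at i * 10 .. i * 10 + 5, so this single NULL
	 * store wipes out roughly sixty of them at once; the write spans
	 * multiple leaf nodes and must rebalance with nodes to its right.
	 */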
	mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
}

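/*
 * Each numbered sequence below is a minimized fuzzer reproducer; the comment
 * above each one records the failure it triggered and the fix it now guards.
 */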
static noinline void check_fuzzer(struct maple_tree *mt)
{
	/*
	 * 1. Causes a spanning rebalance of a single root node.
	 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
	 * entire right side is consumed.
	 */
	mtree_test_insert(mt, 88, (void *)0xb1);
	mtree_test_insert(mt, 84, (void *)0xa9);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 14, (void *)0x1d);
	mtree_test_insert(mt, 7, (void *)0xf);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_insert(mt, 18, (void *)0x25);
	mtree_test_store_range(mt, 8, 18, (void *)0x11);
	mtree_destroy(mt);

	/*
	 * 2. Causes a spanning rebalance of two nodes in the root.
	 * Fixed by setting mast->r->max correctly.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 87, (void *)0xaf);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 4);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_store(mt, 8, (void *)0x11);
	mtree_test_store(mt, 44, (void *)0x59);
	mtree_test_store(mt, 68, (void *)0x89);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 43, (void *)0x57);
	mtree_test_insert(mt, 24, (void *)0x31);
	mtree_test_insert(mt, 844, (void *)0x699);
	mtree_test_store(mt, 84, (void *)0xa9);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 4);
	mtree_test_load(mt, 5);
	mtree_test_erase(mt, 0);
	mtree_destroy(mt);

	/*
	 * 3. Causes a node overflow on copy.
	 * Fixed by using the correct check for node size in mas_wr_modify().
	 * Also discovered an issue with metadata setting.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 5);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 4);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 5);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 5);
	mtree_test_erase(mt, 4);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_store(mt, 444, (void *)0x379);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 0);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 0);
	mtree_destroy(mt);

	/*
	 * 4. Spanning store failure due to writing an incorrect pivot value
	 * at the last slot.
	 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes().
	 */
	mt_init_flags(mt, 0);
	mtree_test_insert(mt, 261, (void *)0x20b);
	mtree_test_store(mt, 516, (void *)0x409);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_insert(mt, 1256, (void *)0x9d1);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 1);
	mtree_test_store(mt, 56, (void *)0x71);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 24, (void *)0x31);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2263, (void *)0x11af);
	mtree_test_insert(mt, 446, (void *)0x37d);
	mtree_test_store_range(mt, 6, 45, (void *)0xd);
	mtree_test_store_range(mt, 3, 446, (void *)0x7);
	mtree_destroy(mt);

	/*
	 * 5. mas_wr_extend_null() may overflow slots.
	 * Fixed by checking against wr_mas->node_end.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 48, (void *)0x61);
	mtree_test_store(mt, 3, (void *)0x7);
	mtree_test_load(mt, 0);
	mtree_test_store(mt, 88, (void *)0xb1);
	mtree_test_store(mt, 81, (void *)0xa3);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 2480, (void *)0x1361);
	mtree_test_insert(mt, ULONG_MAX,
			  (void *)0xffffffffffffffff);
	mtree_test_erase(mt, ULONG_MAX);
	mtree_destroy(mt);

	/*
	 * 6. When reusing a node with an implied pivot while the node is
	 * shrinking, old data would be left in the implied slot.
	 * Fixed by checking the last pivot against mas->max and clearing
	 * accordingly. This only affected the left-most node, as that is the
	 * only node allowed to end in NULL.
	 */
	mt_init_flags(mt, 0);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_load(mt, 2);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_load(mt, 2);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_store_range(mt, 4, 62, (void *)0x9);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 0, (void *)0x3);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 15, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_insert(mt, 122, (void *)0xf5);
	mtree_test_store(mt, 3, (void *)0x7);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_store_range(mt, 0, 1, (void *)0x1);
	mtree_test_insert(mt, 85, (void *)0xab);
	mtree_test_insert(mt, 72, (void *)0x91);
	mtree_test_insert(mt, 81, (void *)0xa3);
	mtree_test_insert(mt, 726, (void *)0x5ad);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 51, (void *)0x67);
	mtree_test_insert(mt, 611, (void *)0x4c7);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 26, 1, (void *)0x35);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 22, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 53);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 1, (void *)0x3);
	mtree_test_insert(mt, 222, (void *)0x1bd);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 0);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 621, (void *)0x4db);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_erase(mt, 5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 0, (void *)0x3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store_range(mt, 4, 62, (void *)0x9);
	mtree_test_erase(mt, 62);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 22, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 53);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 1, (void *)0x3);
	mtree_test_insert(mt, 222, (void *)0x1bd);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 0);
	mtree_test_load(mt, 0);
	mtree_destroy(mt);

	/*
	 * 7. The previous fix was incomplete. Fixed by making
	 * mas_reuse_node() clear out the old data by overwriting it first,
	 * so that stale metadata is of no concern.
	 */
	mt_init_flags(mt, 0);
	mtree_test_load(mt, 1);
	mtree_test_insert(mt, 102, (void *)0xcd);
	mtree_test_erase(mt, 2);
	mtree_test_erase(mt, 0);
	mtree_test_load(mt, 0);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 110, (void *)0xdd);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 5, 0, (void *)0xb);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store(mt, 112, (void *)0xe1);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 22);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 210, (void *)0x1a5);
	mtree_test_store_range(mt, 0, 2, (void *)0x1);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_erase(mt, 22);
	mtree_test_erase(mt, 1);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 112);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 112);
	mtree_test_store_range(mt, 110, 12, (void *)0xdd);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 110);
	mtree_test_insert_range(mt, 4, 71, (void *)0x9);
	mtree_test_load(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_insert_range(mt, 11, 22, (void *)0x17);
	mtree_test_erase(mt, 12);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 22);
	mtree_destroy(mt);

	/*
	 * 8. When rebalancing or spanning_rebalance(), the max of the new
	 * node may be set incorrectly to the last pivot rather than the
	 * correct right max.
	 * Fixed by setting the left max to the original right max if the
	 * entire node is consumed.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 6716, (void *)0x3479);
	mtree_test_store(mt, 61, (void *)0x7b);
	mtree_test_insert(mt, 13, (void *)0x1b);
	mtree_test_store(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 0);
	mtree_test_erase(mt, 67167);
	mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_erase(mt, 67);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 667167);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_erase(mt, 67);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 67167, (void *)0x20cbf);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 7);
	mtree_test_insert(mt, 16, (void *)0x21);
	mtree_test_insert(mt, 36, (void *)0x49);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 367, (void *)0x2df);
	mtree_test_insert(mt, 115, (void *)0xe7);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store_range(mt, 1, 3, (void *)0x3);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 67167);
	mtree_test_insert_range(mt, 6, 47, (void *)0xd);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 1, 67, (void *)0x3);
	mtree_test_load(mt, 67);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 67167);
	mtree_destroy(mt);

	/*
	 * 9. A spanning store to the end of the data caused an invalid
	 * metadata length, which eventually resulted in a crash.
	 * Fixed by checking for a value in the pivot before incrementing the
	 * metadata end in mab_mas_cp(). To ensure this doesn't happen again,
	 * the two locations where this occurs were abstracted into a
	 * function called mas_leaf_set_meta().
	 */
	mt_init_flags(mt, 0);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 91, (void *)0xb7);
	mtree_test_insert(mt, 18, (void *)0x25);
	mtree_test_insert(mt, 81, (void *)0xa3);
	mtree_test_store_range(mt, 0, 128, (void *)0x1);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 8);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
	mtree_test_erase(mt, ULONG_MAX - 10);
	mtree_test_store_range(mt, 0, 281, (void *)0x1);
	mtree_test_erase(mt, 2);
	mtree_test_insert(mt, 1211, (void *)0x977);
	mtree_test_insert(mt, 111, (void *)0xdf);
	mtree_test_insert(mt, 13, (void *)0x1b);
	mtree_test_insert(mt, 211, (void *)0x1a7);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_insert(mt, 1218, (void *)0x985);
	mtree_test_insert(mt, 61, (void *)0x7b);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 121, (void *)0xf3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
	mtree_test_erase(mt, ULONG_MAX - 10);
}

/* Duplicate the tree with a specific gap */
static noinline void check_dup_gaps(struct maple_tree *mt,
				    unsigned long nr_entries, bool zero_start,
				    unsigned long gap)
{
	unsigned long i = 0;
	struct maple_tree newmt;
	int ret;
	void *tmp;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, &newmt, 0, 0);

	if (!zero_start)
		i = 1;

	mt_zero_nr_tallocated();
	for (; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, (i+1)*10 - gap,
				  xa_mk_value(i), GFP_KERNEL);

	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	mt_set_non_kernel(99999);
	mas_lock(&newmas);
	ret = mas_expected_entries(&newmas, nr_entries);
	mt_set_non_kernel(0);
	MT_BUG_ON(mt, ret != 0);

	rcu_read_lock();
	mas_for_each(&mas, tmp, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store(&newmas, tmp);
	}
	rcu_read_unlock();
	mas_destroy(&newmas);
	mas_unlock(&newmas);

	mtree_destroy(&newmt);
}
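
/*
 * Minimal sketch of the bulk-load pattern used by check_dup_gaps() above,
 * not wired into the test run: mas_expected_entries() preallocates nodes
 * for a known number of stores so the loop cannot fail midway, and
 * mas_destroy() returns whatever preallocations remain. The entry count
 * and ranges are arbitrary choices for illustration.
 */
static void __maybe_unused example_bulk_fill(struct maple_tree *mt)
{
	unsigned long i;
	MA_STATE(mas, mt, 0, 0);

	mas_lock(&mas);
	if (!mas_expected_entries(&mas, 16)) {
		for (i = 0; i < 16; i++) {
			mas_set_range(&mas, i * 10, i * 10 + 5);
			mas_store(&mas, xa_mk_value(i));
		}
	}
	mas_destroy(&mas);	/* free any unused preallocated nodes */
	mas_unlock(&mas);
}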

/* Duplicate many sizes of trees. Mainly to test expected entry values */
static noinline void check_dup(struct maple_tree *mt)
{
	int i;
	int big_start = 100010;

	/* Check with a value at zero */
	for (i = 10; i < 1000; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, true, 5);
		mtree_destroy(mt);
		rcu_barrier();
	}

	cond_resched();
	mt_cache_shrink();
	/* Check with a value at zero, no gap */
	for (i = 1000; i < 2000; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, true, 0);
		mtree_destroy(mt);
		rcu_barrier();
	}

	cond_resched();
	mt_cache_shrink();
	/* Check with a value at zero and an unreasonably large tree */
	for (i = big_start; i < big_start + 10; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, true, 5);
		mtree_destroy(mt);
		rcu_barrier();
	}

	cond_resched();
	mt_cache_shrink();
	/* Small to medium size, not starting at zero */
	for (i = 200; i < 1000; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, false, 5);
		mtree_destroy(mt);
		rcu_barrier();
	}

	cond_resched();
	mt_cache_shrink();
	/* Unreasonably large, not starting at zero */
	for (i = big_start; i < big_start + 10; i++) {
		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
		check_dup_gaps(mt, i, false, 5);
		mtree_destroy(mt);
		rcu_barrier();
		cond_resched();
		mt_cache_shrink();
	}

	/* Check a non-allocation tree not starting at zero */
	for (i = 1500; i < 3000; i++) {
		mt_init_flags(mt, 0);
		check_dup_gaps(mt, i, false, 5);
		mtree_destroy(mt);
		rcu_barrier();
		cond_resched();
		if (i % 2 == 0)
			mt_cache_shrink();
	}

	mt_cache_shrink();
	/* Check a non-allocation tree starting at zero */
	for (i = 200; i < 1000; i++) {
		mt_init_flags(mt, 0);
		check_dup_gaps(mt, i, true, 5);
		mtree_destroy(mt);
		rcu_barrier();
		cond_resched();
	}

	mt_cache_shrink();
	/* Unreasonably large */
	for (i = big_start + 5; i < big_start + 10; i++) {
		mt_init_flags(mt, 0);
		check_dup_gaps(mt, i, true, 5);
		mtree_destroy(mt);
		rcu_barrier();
		mt_cache_shrink();
		cond_resched();
	}
}

static noinline void check_bnode_min_spanning(struct maple_tree *mt)
{
	int i = 50;
	MA_STATE(mas, mt, 0, 0);

	mt_set_non_kernel(9999);
	mas_lock(&mas);
	do {
		mas_set_range(&mas, i*10, i*10+9);
		mas_store(&mas, check_bnode_min_spanning);
	} while (i--);

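	/*
	 * Ranges 0-509 are now fully populated; overwriting 240-509 with
	 * NULL is a single spanning store that consumes the tail of the
	 * tree, exercising the minimum-span handling of the big node.
	 */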
	mas_set_range(&mas, 240, 509);
	mas_store(&mas, NULL);
	mas_unlock(&mas);
	mas_destroy(&mas);
	mt_set_non_kernel(0);
}

static DEFINE_MTREE(tree);
static int maple_tree_seed(void)
{
	unsigned long set[] = {5015, 5014, 5017, 25, 1000,
			       1001, 1002, 1003, 1005, 0,
			       5003, 5002};
	void *ptr = &set;
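	/* ptr is just a recognizable non-NULL entry; &tree serves as a second, distinct one. */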

	pr_info("\nTEST STARTING\n\n");

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_root_expand(&tree);
	mtree_destroy(&tree);

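	/*
	 * Defining one of the BENCH_* macros near the top of the file runs a
	 * single benchmark and then jumps to the summary, skipping the
	 * functional tests below.
	 */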
#if defined(BENCH_SLOT_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_slot_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_NODE_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_node_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_AWALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_awalk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_WALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_walk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_FORK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_forking(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MT_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mt_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_forking(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_mas_store_gfp(&tree);
	mtree_destroy(&tree);

	/* Test ranges (store and insert) */
	mt_init_flags(&tree, 0);
	check_ranges(&tree);
	mtree_destroy(&tree);

#if defined(CONFIG_64BIT)
	/* These tests have ranges outside of 4GB */
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_range(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_rev_range(&tree);
	mtree_destroy(&tree);
#endif

	mt_init_flags(&tree, 0);

	check_load(&tree, set[0], NULL);	/* See if 5015 -> NULL */

	check_insert(&tree, set[9], &tree);	/* Insert 0 */
	check_load(&tree, set[9], &tree);	/* See if 0 -> &tree */
	check_load(&tree, set[0], NULL);	/* See if 5015 -> NULL */

	check_insert(&tree, set[10], ptr);	/* Insert 5003 */
	check_load(&tree, set[9], &tree);	/* See if 0 -> &tree */
	check_load(&tree, set[11], NULL);	/* See if 5002 -> NULL */
	check_load(&tree, set[10], ptr);	/* See if 5003 -> ptr */

	/* Clear out the tree */
	mtree_destroy(&tree);

	/* Try to insert, insert a dup, and load back what was inserted. */
	mt_init_flags(&tree, 0);
	check_insert(&tree, set[0], &tree);	/* Insert 5015 */
	check_dup_insert(&tree, set[0], &tree);	/* Insert 5015 again */
	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */

	/*
	 * The second set of tests tries to load a value that doesn't exist,
	 * inserts a second value, then loads the value again.
	 */
	check_load(&tree, set[1], NULL);	/* See if 5014 -> NULL */
	check_insert(&tree, set[1], ptr);	/* Insert 5014 -> ptr */
	check_load(&tree, set[1], ptr);		/* See if 5014 -> ptr */
	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */
	/*
	 * Tree currently contains:
	 * 5014 -> ptr and 5015 -> &tree
	 */
	check_insert(&tree, set[6], ptr);	/* Insert 1002 -> ptr */
	check_insert(&tree, set[7], &tree);	/* Insert 1003 -> &tree */

	check_load(&tree, set[0], &tree);	/* See if 5015 -> &tree */
	check_load(&tree, set[1], ptr);		/* See if 5014 -> ptr */
	check_load(&tree, set[6], ptr);		/* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);	/* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/* Test inserting into a NULL hole. */
	check_insert(&tree, set[5], ptr);	/* Insert 1001 -> ptr */
	check_insert(&tree, set[7], &tree);	/* Insert 1003 -> &tree */
	check_insert(&tree, set[6], ptr);	/* Insert 1002 -> ptr */
	check_load(&tree, set[5], ptr);		/* See if 1001 -> ptr */
	check_load(&tree, set[6], ptr);		/* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);	/* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/*
	 * set[] = {5015, 5014, 5017, 25, 1000,
	 *	    1001, 1002, 1003, 1005, 0,
	 *	    5003, 5002};
	 */

	check_insert(&tree, set[0], ptr);	/* 5015 */
	check_insert(&tree, set[1], &tree);	/* 5014 */
	check_insert(&tree, set[2], ptr);	/* 5017 */
	check_insert(&tree, set[3], &tree);	/* 25 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_insert(&tree, set[4], ptr);	/* 1000; should split */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);	/* 25 */
	check_load(&tree, set[4], ptr);
	check_insert(&tree, set[5], &tree);	/* 1001 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_insert(&tree, set[6], ptr);
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_insert(&tree, set[7], &tree);
	check_load(&tree, set[0], ptr);
	check_insert(&tree, set[8], ptr);

	check_insert(&tree, set[9], &tree);

	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_load(&tree, set[9], &tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 16, false);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	check_lower_bound_split(&tree);
	check_upper_bound_split(&tree);
	check_mid_split(&tree);

	mt_init_flags(&tree, 0);
	check_next_entry(&tree);
	check_find(&tree);
	check_find_2(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_prev_entry(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_gap_combining(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_node_overwrite(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	next_prev_test(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_spanning_relatives(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_find(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_fuzzer(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_dup(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_bnode_min_spanning(&tree);
	mtree_destroy(&tree);

#if defined(BENCH)
skip:
#endif
	rcu_barrier();
	pr_info("maple_tree: %u of %u tests passed\n",
		atomic_read(&maple_tree_tests_passed),
		atomic_read(&maple_tree_tests_run));
	if (atomic_read(&maple_tree_tests_run) ==
	    atomic_read(&maple_tree_tests_passed))
		return 0;

	return -EINVAL;
}

static void maple_tree_harvest(void)
{
}

module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
MODULE_LICENSE("GPL");