Lines matching full:mt — each entry below gives the source line number, the matched line, and the containing function (or, for declarations, whether the match is a struct member or a macro argument).

29 #define RCU_MT_BUG_ON(test, y) {if (y) { test->stop = true; } MT_BUG_ON(test->mt, y); }
32 struct maple_tree *mt; member
49 struct maple_tree *mt; member
91 static noinline void __init check_new_node(struct maple_tree *mt) in check_new_node() argument
99 MA_STATE(mas, mt, 0, 0); in check_new_node()
104 mtree_lock(mt); in check_new_node()
109 MT_BUG_ON(mt, mas_alloc_req(&mas) != 3); in check_new_node()
111 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM)); in check_new_node()
112 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
114 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
116 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
117 MT_BUG_ON(mt, mn == NULL); in check_new_node()
118 MT_BUG_ON(mt, mas.alloc == NULL); in check_new_node()
119 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL); in check_new_node()
123 mtree_unlock(mt); in check_new_node()
127 mtree_lock(mt); in check_new_node()
131 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1); in check_new_node()
134 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
137 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
138 MT_BUG_ON(mt, mn == NULL); in check_new_node()
139 MT_BUG_ON(mt, mn->slot[0] != NULL); in check_new_node()
140 MT_BUG_ON(mt, mn->slot[1] != NULL); in check_new_node()
141 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
152 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
154 MT_BUG_ON(mt, mas_alloc_req(&mas) != 0); in check_new_node()
156 MT_BUG_ON(mt, mas.alloc == NULL); in check_new_node()
157 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL); in check_new_node()
158 MT_BUG_ON(mt, mas.alloc->slot[1] == NULL); in check_new_node()
160 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
167 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1); in check_new_node()
170 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
171 MT_BUG_ON(mt, mas_allocated(&mas) != 1); in check_new_node()
174 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
175 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
176 MT_BUG_ON(mt, mn == NULL); in check_new_node()
177 MT_BUG_ON(mt, mn->slot[0] != NULL); in check_new_node()
178 MT_BUG_ON(mt, mn->slot[1] != NULL); in check_new_node()
179 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
181 MT_BUG_ON(mt, mas_allocated(&mas) != 1); in check_new_node()
182 MT_BUG_ON(mt, mas.alloc->node_count); in check_new_node()
185 MT_BUG_ON(mt, mas_alloc_req(&mas) != 2); in check_new_node()
187 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
188 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
189 MT_BUG_ON(mt, mas.alloc == NULL); in check_new_node()
190 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL); in check_new_node()
191 MT_BUG_ON(mt, mas.alloc->slot[1] == NULL); in check_new_node()
194 MT_BUG_ON(mt, mas_allocated(&mas) != i); in check_new_node()
195 MT_BUG_ON(mt, !mn); in check_new_node()
196 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
203 MT_BUG_ON(mt, mas_alloc_req(&mas) != total); in check_new_node()
205 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
225 MT_BUG_ON(mt, mas.alloc->node_count != e); in check_new_node()
227 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
228 MT_BUG_ON(mt, mas_allocated(&mas) != i - 1); in check_new_node()
229 MT_BUG_ON(mt, !mn); in check_new_node()
238 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
241 MT_BUG_ON(mt, mas_allocated(&mas) != j - 1); in check_new_node()
242 MT_BUG_ON(mt, !mn); in check_new_node()
243 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
245 MT_BUG_ON(mt, mas_allocated(&mas) != j); in check_new_node()
247 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
248 MT_BUG_ON(mt, mas_allocated(&mas) != j - 1); in check_new_node()
252 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
256 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
258 MT_BUG_ON(mt, mas_allocated(&mas) != i - j); in check_new_node()
260 MT_BUG_ON(mt, mas_allocated(&mas) != i - j - 1); in check_new_node()
266 MT_BUG_ON(mt, mas_allocated(&mas) != i - j); in check_new_node()
268 MT_BUG_ON(mt, mas_allocated(&mas) != i); in check_new_node()
270 MT_BUG_ON(mt, mas_allocated(&mas) != i - j); in check_new_node()
272 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
275 MT_BUG_ON(mt, mas_allocated(&mas) != i - j - 1); in check_new_node()
278 MT_BUG_ON(mt, mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
287 MT_BUG_ON(mt, !mas.alloc); in check_new_node()
293 MT_BUG_ON(mt, !smn->slot[j]); in check_new_node()
299 MT_BUG_ON(mt, mas_allocated(&mas) != total); in check_new_node()
303 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
307 MT_BUG_ON(mt, mas_allocated(&mas) != i); /* check request filled */ in check_new_node()
310 MT_BUG_ON(mt, mn == NULL); in check_new_node()
311 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
315 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
319 MA_STATE(mas2, mt, 0, 0); in check_new_node()
322 MT_BUG_ON(mt, mas_allocated(&mas) != i); /* check request filled */ in check_new_node()
325 MT_BUG_ON(mt, mn == NULL); in check_new_node()
326 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
328 MT_BUG_ON(mt, mas_allocated(&mas2) != j); in check_new_node()
330 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
331 MT_BUG_ON(mt, mas_allocated(&mas2) != i); in check_new_node()
334 MT_BUG_ON(mt, mas_allocated(&mas2) != j); in check_new_node()
336 MT_BUG_ON(mt, mn == NULL); in check_new_node()
337 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
341 MT_BUG_ON(mt, mas_allocated(&mas2) != 0); in check_new_node()
345 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
347 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM)); in check_new_node()
348 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
349 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1); in check_new_node()
350 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS); in check_new_node()
353 MT_BUG_ON(mt, mn == NULL); in check_new_node()
354 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
355 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS); in check_new_node()
356 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS - 1); in check_new_node()
359 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1); in check_new_node()
360 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS); in check_new_node()
364 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1); in check_new_node()
365 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM)); in check_new_node()
366 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
367 MT_BUG_ON(mt, mas_alloc_req(&mas)); in check_new_node()
368 MT_BUG_ON(mt, mas.alloc->node_count != 1); in check_new_node()
369 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 2); in check_new_node()
371 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
372 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1); in check_new_node()
373 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS); in check_new_node()
375 MT_BUG_ON(mt, mas.alloc->node_count != 1); in check_new_node()
376 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 2); in check_new_node()
378 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
383 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
387 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
444 MT_BUG_ON(mt, mas_allocated(&mas) != 5); in check_new_node()
449 MT_BUG_ON(mt, mas_allocated(&mas) != 10); in check_new_node()
455 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS - 1); in check_new_node()
460 MT_BUG_ON(mt, mas_allocated(&mas) != 10 + MAPLE_ALLOC_SLOTS - 1); in check_new_node()
463 mtree_unlock(mt); in check_new_node()
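
The check_new_node() matches above exercise the maple state's node pre-allocation accounting: a request is posted, a failed first attempt is refilled through mas_nomem(), and mas_allocated()/mas_alloc_req() track what sits in mas.alloc. Below is a minimal sketch of that request/refill/consume cycle; it assumes the test-harness build (where lib/maple_tree.c internals such as mas_node_count(), mas_pop_node() and mas_push_node() are visible to the test), and sketch_node_alloc() is a made-up name.

        /* Sketch of the allocation-accounting cycle exercised by check_new_node().
         * Assumes the test-harness build, where lib/maple_tree.c's internal
         * helpers (mas_node_count, mas_nomem, mas_allocated, mas_pop_node,
         * mas_push_node) are visible. */
        static void sketch_node_alloc(struct maple_tree *mt)
        {
                struct maple_node *mn;
                MA_STATE(mas, mt, 0, 0);

                mtree_lock(mt);
                mas_node_count(&mas, 3);        /* post a request for three nodes */
                if (mas_is_err(&mas))           /* the first attempt may fail */
                        MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); /* refill */
                MT_BUG_ON(mt, mas_allocated(&mas) != 3);

                mn = mas_pop_node(&mas);        /* take one node off mas.alloc */
                MT_BUG_ON(mt, mas_allocated(&mas) != 2);
                mas_push_node(&mas, mn);        /* hand it back */
                MT_BUG_ON(mt, mas_allocated(&mas) != 3);

                mas_destroy(&mas);              /* free anything left over */
                mtree_unlock(mt);
        }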
469 static noinline void __init check_erase(struct maple_tree *mt, unsigned long index, in check_erase() argument
472 MT_BUG_ON(mt, mtree_test_erase(mt, index) != ptr); in check_erase()
475 #define erase_check_load(mt, i) check_load(mt, set[i], entry[i%2]) argument
476 #define erase_check_insert(mt, i) check_insert(mt, set[i], entry[i%2]) argument
477 #define erase_check_erase(mt, i) check_erase(mt, set[i], entry[i%2]) argument
479 static noinline void __init check_erase_testset(struct maple_tree *mt) in check_erase_testset() argument
497 void *entry[2] = { ptr, mt }; in check_erase_testset()
502 mt_set_in_rcu(mt); in check_erase_testset()
504 erase_check_insert(mt, i); in check_erase_testset()
506 erase_check_load(mt, i); in check_erase_testset()
509 erase_check_erase(mt, 1); in check_erase_testset()
510 erase_check_load(mt, 0); in check_erase_testset()
511 check_load(mt, set[1], NULL); in check_erase_testset()
513 erase_check_load(mt, i); in check_erase_testset()
516 erase_check_erase(mt, 2); in check_erase_testset()
517 erase_check_load(mt, 0); in check_erase_testset()
518 check_load(mt, set[1], NULL); in check_erase_testset()
519 check_load(mt, set[2], NULL); in check_erase_testset()
521 erase_check_insert(mt, 1); in check_erase_testset()
522 erase_check_insert(mt, 2); in check_erase_testset()
525 erase_check_load(mt, i); in check_erase_testset()
528 erase_check_load(mt, 3); in check_erase_testset()
529 erase_check_erase(mt, 1); in check_erase_testset()
530 erase_check_load(mt, 0); in check_erase_testset()
531 check_load(mt, set[1], NULL); in check_erase_testset()
533 erase_check_load(mt, i); in check_erase_testset()
539 root_node = mt->ma_root; in check_erase_testset()
540 erase_check_insert(mt, 1); in check_erase_testset()
542 erase_check_load(mt, 0); in check_erase_testset()
543 check_load(mt, 5016, NULL); in check_erase_testset()
544 erase_check_load(mt, 1); in check_erase_testset()
545 check_load(mt, 5013, NULL); in check_erase_testset()
546 erase_check_load(mt, 2); in check_erase_testset()
547 check_load(mt, 5018, NULL); in check_erase_testset()
548 erase_check_load(mt, 3); in check_erase_testset()
550 erase_check_erase(mt, 2); /* erase 5017 to check append */ in check_erase_testset()
551 erase_check_load(mt, 0); in check_erase_testset()
552 check_load(mt, 5016, NULL); in check_erase_testset()
553 erase_check_load(mt, 1); in check_erase_testset()
554 check_load(mt, 5013, NULL); in check_erase_testset()
555 check_load(mt, set[2], NULL); in check_erase_testset()
556 check_load(mt, 5018, NULL); in check_erase_testset()
558 erase_check_load(mt, 3); in check_erase_testset()
560 root_node = mt->ma_root; in check_erase_testset()
561 erase_check_insert(mt, 2); in check_erase_testset()
563 erase_check_load(mt, 0); in check_erase_testset()
564 check_load(mt, 5016, NULL); in check_erase_testset()
565 erase_check_load(mt, 1); in check_erase_testset()
566 check_load(mt, 5013, NULL); in check_erase_testset()
567 erase_check_load(mt, 2); in check_erase_testset()
568 check_load(mt, 5018, NULL); in check_erase_testset()
569 erase_check_load(mt, 3); in check_erase_testset()
572 erase_check_erase(mt, 2); /* erase 5017 to check append */ in check_erase_testset()
573 erase_check_load(mt, 0); in check_erase_testset()
574 check_load(mt, 5016, NULL); in check_erase_testset()
575 check_load(mt, set[2], NULL); in check_erase_testset()
576 erase_check_erase(mt, 0); /* erase 5015 to check append */ in check_erase_testset()
577 check_load(mt, set[0], NULL); in check_erase_testset()
578 check_load(mt, 5016, NULL); in check_erase_testset()
579 erase_check_insert(mt, 4); /* 1000 < Should not split. */ in check_erase_testset()
580 check_load(mt, set[0], NULL); in check_erase_testset()
581 check_load(mt, 5016, NULL); in check_erase_testset()
582 erase_check_load(mt, 1); in check_erase_testset()
583 check_load(mt, 5013, NULL); in check_erase_testset()
584 check_load(mt, set[2], NULL); in check_erase_testset()
585 check_load(mt, 5018, NULL); in check_erase_testset()
586 erase_check_load(mt, 4); in check_erase_testset()
587 check_load(mt, 999, NULL); in check_erase_testset()
588 check_load(mt, 1001, NULL); in check_erase_testset()
589 erase_check_load(mt, 4); in check_erase_testset()
590 if (mt_in_rcu(mt)) in check_erase_testset()
591 MT_BUG_ON(mt, root_node == mt->ma_root); in check_erase_testset()
593 MT_BUG_ON(mt, root_node != mt->ma_root); in check_erase_testset()
596 MT_BUG_ON(mt, !mte_is_leaf(mt->ma_root)); in check_erase_testset()
600 erase_check_insert(mt, 0); in check_erase_testset()
601 erase_check_insert(mt, 2); in check_erase_testset()
604 erase_check_insert(mt, i); in check_erase_testset()
606 erase_check_load(mt, j); in check_erase_testset()
609 erase_check_erase(mt, 14); /*6015 */ in check_erase_testset()
612 check_load(mt, set[i], NULL); in check_erase_testset()
614 erase_check_load(mt, i); in check_erase_testset()
616 erase_check_erase(mt, 16); /*7002 */ in check_erase_testset()
619 check_load(mt, set[i], NULL); in check_erase_testset()
621 erase_check_load(mt, i); in check_erase_testset()
626 erase_check_erase(mt, 13); /*6012 */ in check_erase_testset()
629 check_load(mt, set[i], NULL); in check_erase_testset()
631 erase_check_load(mt, i); in check_erase_testset()
634 erase_check_erase(mt, 15); /*7003 */ in check_erase_testset()
637 check_load(mt, set[i], NULL); in check_erase_testset()
639 erase_check_load(mt, i); in check_erase_testset()
643 erase_check_erase(mt, 17); /*7008 *should* cause coalesce. */ in check_erase_testset()
646 check_load(mt, set[i], NULL); in check_erase_testset()
648 erase_check_load(mt, i); in check_erase_testset()
651 erase_check_erase(mt, 18); /*7012 */ in check_erase_testset()
654 check_load(mt, set[i], NULL); in check_erase_testset()
656 erase_check_load(mt, i); in check_erase_testset()
660 erase_check_erase(mt, 19); /*7015 */ in check_erase_testset()
663 check_load(mt, set[i], NULL); in check_erase_testset()
665 erase_check_load(mt, i); in check_erase_testset()
668 erase_check_erase(mt, 20); /*8003 */ in check_erase_testset()
671 check_load(mt, set[i], NULL); in check_erase_testset()
673 erase_check_load(mt, i); in check_erase_testset()
676 erase_check_erase(mt, 21); /*8002 */ in check_erase_testset()
679 check_load(mt, set[i], NULL); in check_erase_testset()
681 erase_check_load(mt, i); in check_erase_testset()
685 erase_check_erase(mt, 22); /*8008 */ in check_erase_testset()
688 check_load(mt, set[i], NULL); in check_erase_testset()
690 erase_check_load(mt, i); in check_erase_testset()
693 erase_check_erase(mt, i); in check_erase_testset()
697 check_load(mt, set[i], NULL); in check_erase_testset()
699 erase_check_load(mt, i); in check_erase_testset()
705 erase_check_insert(mt, i); in check_erase_testset()
709 erase_check_erase(mt, i); in check_erase_testset()
712 erase_check_load(mt, j); in check_erase_testset()
714 check_load(mt, set[j], NULL); in check_erase_testset()
719 erase_check_erase(mt, i); in check_erase_testset()
722 erase_check_load(mt, j); in check_erase_testset()
724 check_load(mt, set[j], NULL); in check_erase_testset()
727 erase_check_insert(mt, 8); in check_erase_testset()
728 erase_check_insert(mt, 9); in check_erase_testset()
729 erase_check_erase(mt, 8); in check_erase_testset()
736 #define erase_check_store_range(mt, a, i, ptr) mtree_test_store_range(mt, \ argument
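
erase_check_store_range() above is a thin wrapper around mtree_test_store_range(); the erase2 test sets that follow replay long sequences of range stores and erases, then verify the result with loads and mt_validate(). A minimal sketch of that store/erase/verify round trip using only the public mtree_*() API; sketch_store_erase() and the index values are made up for illustration, and MT_BUG_ON()/mt_validate() assume a debug build.

        /* Sketch: store a range, overwrite part of it, erase, and verify.
         * Public maple tree API only; index values are arbitrary. */
        static void sketch_store_erase(void)
        {
                DEFINE_MTREE(tree);

                mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
                mtree_store_range(&tree, 0x1000, 0x1fff, xa_mk_value(1), GFP_KERNEL);
                mtree_store_range(&tree, 0x1800, 0x2fff, xa_mk_value(2), GFP_KERNEL);

                MT_BUG_ON(&tree, mtree_load(&tree, 0x1000) != xa_mk_value(1));
                MT_BUG_ON(&tree, mtree_load(&tree, 0x1800) != xa_mk_value(2));

                mtree_erase(&tree, 0x1800);     /* drops the whole 0x1800-0x2fff entry */
                MT_BUG_ON(&tree, mtree_load(&tree, 0x2000) != NULL);

                mt_validate(&tree);             /* internal consistency check */
                mtree_destroy(&tree);
        }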
986 static noinline void __init check_erase2_testset(struct maple_tree *mt, in check_erase2_testset() argument
995 MA_STATE(mas, mt, 0, 0); in check_erase2_testset()
1002 MA_STATE(mas_start, mt, set[i+1], set[i+1]); in check_erase2_testset()
1003 MA_STATE(mas_end, mt, set[i+2], set[i+2]); in check_erase2_testset()
1030 erase_check_store_range(mt, set, i + 1, value); in check_erase2_testset()
1058 erase_check_store_range(mt, set, i + 1, value); in check_erase2_testset()
1063 check_erase(mt, set[i+1], xa_mk_value(set[i+1])); in check_erase2_testset()
1067 mt_validate(mt); in check_erase2_testset()
1069 MT_BUG_ON(mt, !mt_height(mt)); in check_erase2_testset()
1071 mt_dump(mt, mt_dump_hex); in check_erase2_testset()
1079 mt_for_each(mt, foo, addr, ULONG_MAX) { in check_erase2_testset()
1082 pr_err("mt: %lu -> %p (%d)\n", addr+1, foo, check); in check_erase2_testset()
1092 MT_BUG_ON(mt, check != entry_count); in check_erase2_testset()
1105 MT_BUG_ON(mt, 1); in check_erase2_testset()
1120 mt_validate(mt); in check_erase2_testset()
1123 MT_BUG_ON(mt, check != entry_count); in check_erase2_testset()
1125 MT_BUG_ON(mt, mtree_load(mas.tree, 0) != NULL); in check_erase2_testset()
1131 static noinline void __init check_erase2_sets(struct maple_tree *mt) in check_erase2_sets() argument
33871 * Gap was found: mt 4041162752 gap_end 4041183232 in check_erase2_sets()
33903 MA_STATE(mas, mt, 0, 0); in check_erase2_sets()
33906 check_erase2_testset(mt, set, ARRAY_SIZE(set)); in check_erase2_sets()
33908 mtree_destroy(mt); in check_erase2_sets()
33910 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33911 check_erase2_testset(mt, set2, ARRAY_SIZE(set2)); in check_erase2_sets()
33913 MT_BUG_ON(mt, !!mt_find(mt, &start, 140735933906943UL)); in check_erase2_sets()
33914 mtree_destroy(mt); in check_erase2_sets()
33917 mt_init_flags(mt, 0); in check_erase2_sets()
33918 check_erase2_testset(mt, set3, ARRAY_SIZE(set3)); in check_erase2_sets()
33920 mtree_destroy(mt); in check_erase2_sets()
33922 mt_init_flags(mt, 0); in check_erase2_sets()
33923 check_erase2_testset(mt, set4, ARRAY_SIZE(set4)); in check_erase2_sets()
33931 mtree_destroy(mt); in check_erase2_sets()
33933 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33935 check_erase2_testset(mt, set5, ARRAY_SIZE(set5)); in check_erase2_sets()
33938 mtree_destroy(mt); in check_erase2_sets()
33940 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33941 check_erase2_testset(mt, set6, ARRAY_SIZE(set6)); in check_erase2_sets()
33943 mtree_destroy(mt); in check_erase2_sets()
33945 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33946 check_erase2_testset(mt, set7, ARRAY_SIZE(set7)); in check_erase2_sets()
33948 mtree_destroy(mt); in check_erase2_sets()
33950 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33951 check_erase2_testset(mt, set8, ARRAY_SIZE(set8)); in check_erase2_sets()
33953 mtree_destroy(mt); in check_erase2_sets()
33955 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33956 check_erase2_testset(mt, set9, ARRAY_SIZE(set9)); in check_erase2_sets()
33958 mtree_destroy(mt); in check_erase2_sets()
33960 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33961 check_erase2_testset(mt, set10, ARRAY_SIZE(set10)); in check_erase2_sets()
33963 mtree_destroy(mt); in check_erase2_sets()
33966 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33967 check_erase2_testset(mt, set11, ARRAY_SIZE(set11)); in check_erase2_sets()
33970 MT_BUG_ON(mt, mas.last != 140014592573439); in check_erase2_sets()
33971 mtree_destroy(mt); in check_erase2_sets()
33974 mas.tree = mt; in check_erase2_sets()
33977 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33978 check_erase2_testset(mt, set12, ARRAY_SIZE(set12)); in check_erase2_sets()
33986 mtree_destroy(mt); in check_erase2_sets()
33989 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33990 check_erase2_testset(mt, set13, ARRAY_SIZE(set13)); in check_erase2_sets()
33991 mtree_erase(mt, 140373516443648); in check_erase2_sets()
33995 mtree_destroy(mt); in check_erase2_sets()
33996 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33997 check_erase2_testset(mt, set14, ARRAY_SIZE(set14)); in check_erase2_sets()
33999 mtree_destroy(mt); in check_erase2_sets()
34001 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34002 check_erase2_testset(mt, set15, ARRAY_SIZE(set15)); in check_erase2_sets()
34004 mtree_destroy(mt); in check_erase2_sets()
34009 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34010 check_erase2_testset(mt, set16, ARRAY_SIZE(set16)); in check_erase2_sets()
34013 MT_BUG_ON(mt, mas.last != 139921865547775); in check_erase2_sets()
34015 mtree_destroy(mt); in check_erase2_sets()
34024 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34025 check_erase2_testset(mt, set17, ARRAY_SIZE(set17)); in check_erase2_sets()
34028 MT_BUG_ON(mt, mas.last != 139953197322239); in check_erase2_sets()
34029 /* MT_BUG_ON(mt, mas.index != 139953197318144); */ in check_erase2_sets()
34031 mtree_destroy(mt); in check_erase2_sets()
34040 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34041 check_erase2_testset(mt, set18, ARRAY_SIZE(set18)); in check_erase2_sets()
34044 MT_BUG_ON(mt, mas.last != 140222968475647); in check_erase2_sets()
34045 /*MT_BUG_ON(mt, mas.index != 140222966259712); */ in check_erase2_sets()
34047 mtree_destroy(mt); in check_erase2_sets()
34058 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34059 check_erase2_testset(mt, set19, ARRAY_SIZE(set19)); in check_erase2_sets()
34063 MT_BUG_ON(mt, entry != xa_mk_value(140656779083776)); in check_erase2_sets()
34065 MT_BUG_ON(mt, entry != xa_mk_value(140656766251008)); in check_erase2_sets()
34067 mtree_destroy(mt); in check_erase2_sets()
34074 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34075 check_erase2_testset(mt, set20, ARRAY_SIZE(set20)); in check_erase2_sets()
34077 check_load(mt, 94849009414144, NULL); in check_erase2_sets()
34079 mtree_destroy(mt); in check_erase2_sets()
34082 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34083 check_erase2_testset(mt, set21, ARRAY_SIZE(set21)); in check_erase2_sets()
34085 mt_validate(mt); in check_erase2_sets()
34087 mtree_destroy(mt); in check_erase2_sets()
34090 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34091 check_erase2_testset(mt, set22, ARRAY_SIZE(set22)); in check_erase2_sets()
34093 mt_validate(mt); in check_erase2_sets()
34094 ptr = mtree_load(mt, 140551363362816); in check_erase2_sets()
34095 MT_BUG_ON(mt, ptr == mtree_load(mt, 140551363420159)); in check_erase2_sets()
34097 mtree_destroy(mt); in check_erase2_sets()
34100 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34101 check_erase2_testset(mt, set23, ARRAY_SIZE(set23)); in check_erase2_sets()
34104 mt_validate(mt); in check_erase2_sets()
34105 mtree_destroy(mt); in check_erase2_sets()
34109 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34110 check_erase2_testset(mt, set24, ARRAY_SIZE(set24)); in check_erase2_sets()
34113 mt_validate(mt); in check_erase2_sets()
34114 mtree_destroy(mt); in check_erase2_sets()
34117 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34118 check_erase2_testset(mt, set25, ARRAY_SIZE(set25)); in check_erase2_sets()
34121 mt_validate(mt); in check_erase2_sets()
34122 mtree_destroy(mt); in check_erase2_sets()
34127 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34128 check_erase2_testset(mt, set26, ARRAY_SIZE(set26)); in check_erase2_sets()
34131 MT_BUG_ON(mt, mas.last != 140109040959487); in check_erase2_sets()
34133 mt_validate(mt); in check_erase2_sets()
34134 mtree_destroy(mt); in check_erase2_sets()
34139 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34140 check_erase2_testset(mt, set27, ARRAY_SIZE(set27)); in check_erase2_sets()
34142 MT_BUG_ON(mt, NULL != mtree_load(mt, 140415537422336)); in check_erase2_sets()
34144 mt_validate(mt); in check_erase2_sets()
34145 mtree_destroy(mt); in check_erase2_sets()
34149 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34150 check_erase2_testset(mt, set28, ARRAY_SIZE(set28)); in check_erase2_sets()
34155 MT_BUG_ON(mt, mas.index != 139918401601536); in check_erase2_sets()
34157 mt_validate(mt); in check_erase2_sets()
34158 mtree_destroy(mt); in check_erase2_sets()
34165 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34166 check_erase2_testset(mt, set29, ARRAY_SIZE(set29)); in check_erase2_sets()
34169 mt_validate(mt); in check_erase2_sets()
34170 mtree_destroy(mt); in check_erase2_sets()
34178 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34179 check_erase2_testset(mt, set30, ARRAY_SIZE(set30)); in check_erase2_sets()
34182 mt_validate(mt); in check_erase2_sets()
34183 mtree_destroy(mt); in check_erase2_sets()
34191 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34192 check_erase2_testset(mt, set31, ARRAY_SIZE(set31)); in check_erase2_sets()
34195 mt_validate(mt); in check_erase2_sets()
34196 mtree_destroy(mt); in check_erase2_sets()
34200 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34201 check_erase2_testset(mt, set32, ARRAY_SIZE(set32)); in check_erase2_sets()
34204 mt_validate(mt); in check_erase2_sets()
34205 mtree_destroy(mt); in check_erase2_sets()
34209 * mt 140582827569152 gap_end 140582869532672 in check_erase2_sets()
34220 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34221 check_erase2_testset(mt, set33, ARRAY_SIZE(set33)); in check_erase2_sets()
34224 MT_BUG_ON(mt, mas.last != 140583003750399); in check_erase2_sets()
34226 mt_validate(mt); in check_erase2_sets()
34227 mtree_destroy(mt); in check_erase2_sets()
34235 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34236 check_erase2_testset(mt, set34, ARRAY_SIZE(set34)); in check_erase2_sets()
34239 mt_validate(mt); in check_erase2_sets()
34240 mtree_destroy(mt); in check_erase2_sets()
34245 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34246 check_erase2_testset(mt, set35, ARRAY_SIZE(set35)); in check_erase2_sets()
34249 mt_validate(mt); in check_erase2_sets()
34250 mtree_destroy(mt); in check_erase2_sets()
34255 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34256 check_erase2_testset(mt, set36, ARRAY_SIZE(set36)); in check_erase2_sets()
34259 mt_validate(mt); in check_erase2_sets()
34260 mtree_destroy(mt); in check_erase2_sets()
34263 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34264 check_erase2_testset(mt, set37, ARRAY_SIZE(set37)); in check_erase2_sets()
34266 MT_BUG_ON(mt, NULL != mtree_load(mt, 94637033459712)); in check_erase2_sets()
34267 mt_validate(mt); in check_erase2_sets()
34268 mtree_destroy(mt); in check_erase2_sets()
34271 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34272 check_erase2_testset(mt, set38, ARRAY_SIZE(set38)); in check_erase2_sets()
34274 MT_BUG_ON(mt, NULL != mtree_load(mt, 94637033459712)); in check_erase2_sets()
34275 mt_validate(mt); in check_erase2_sets()
34276 mtree_destroy(mt); in check_erase2_sets()
34279 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34280 check_erase2_testset(mt, set39, ARRAY_SIZE(set39)); in check_erase2_sets()
34282 mt_validate(mt); in check_erase2_sets()
34283 mtree_destroy(mt); in check_erase2_sets()
34286 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34287 check_erase2_testset(mt, set40, ARRAY_SIZE(set40)); in check_erase2_sets()
34289 mt_validate(mt); in check_erase2_sets()
34290 mtree_destroy(mt); in check_erase2_sets()
34293 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34294 check_erase2_testset(mt, set41, ARRAY_SIZE(set41)); in check_erase2_sets()
34296 mt_validate(mt); in check_erase2_sets()
34297 mtree_destroy(mt); in check_erase2_sets()
34302 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34303 check_erase2_testset(mt, set42, ARRAY_SIZE(set42)); in check_erase2_sets()
34306 MT_BUG_ON(mt, mas.last != 4041211903); in check_erase2_sets()
34308 mt_validate(mt); in check_erase2_sets()
34309 mtree_destroy(mt); in check_erase2_sets()
34314 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34315 check_erase2_testset(mt, set43, ARRAY_SIZE(set43)); in check_erase2_sets()
34318 mt_validate(mt); in check_erase2_sets()
34319 mtree_destroy(mt); in check_erase2_sets()
34358 MA_STATE(mas, test->mt, 0, 0); in rcu_reader_fwd()
34468 MA_STATE(mas, test->mt, 0, 0); in rcu_reader_rev()
34530 mt_dump(test->mt, mt_dump_dec); in rcu_reader_rev()
34593 static void rcu_stress_rev(struct maple_tree *mt, struct rcu_test_struct2 *test, in rcu_stress_rev() argument
34613 mtree_store_range(mt, start, end, in rcu_stress_rev()
34623 mtree_store_range(mt, start, end, in rcu_stress_rev()
34634 mtree_store_range(mt, start, end, NULL, GFP_KERNEL); in rcu_stress_rev()
34641 mtree_store_range(mt, start, end, in rcu_stress_rev()
34652 static void rcu_stress_fwd(struct maple_tree *mt, struct rcu_test_struct2 *test, in rcu_stress_fwd() argument
34670 mtree_store_range(mt, start, end, in rcu_stress_fwd()
34680 mtree_store_range(mt, start, end, in rcu_stress_fwd()
34691 mtree_store_range(mt, start, end, NULL, GFP_KERNEL); in rcu_stress_fwd()
34698 mtree_store_range(mt, start, end, in rcu_stress_fwd()
34720 static void rcu_stress(struct maple_tree *mt, bool forward) in rcu_stress() argument
34730 test.mt = mt; in rcu_stress()
34742 mtree_store_range(mt, seed, r, in rcu_stress()
34774 mtree_store_range(mt, test.index[add], test.last[add], in rcu_stress()
34778 mt_set_in_rcu(mt); in rcu_stress()
34784 rcu_stress_fwd(mt, &test, count, test_reader); in rcu_stress()
34786 rcu_stress_rev(mt, &test, count, test_reader); in rcu_stress()
34792 mt_validate(mt); in rcu_stress()
34797 struct maple_tree *mt; /* the maple tree */ member
34876 entry = mtree_load(test->mt, test->index); in rcu_val()
34877 MT_BUG_ON(test->mt, eval_rcu_entry(test, entry, &update_2, in rcu_val()
34897 MA_STATE(mas, test->mt, test->range_start, test->range_start); in rcu_loop()
34918 MT_BUG_ON(test->mt, entry != expected); in rcu_loop()
34926 MT_BUG_ON(test->mt, eval_rcu_entry(test, entry, in rcu_loop()
34939 void run_check_rcu(struct maple_tree *mt, struct rcu_test_struct *vals) in run_check_rcu() argument
34946 mt_set_in_rcu(mt); in run_check_rcu()
34947 MT_BUG_ON(mt, !mt_in_rcu(mt)); in run_check_rcu()
34962 mtree_store_range(mt, vals->index, vals->last, vals->entry2, in run_check_rcu()
34968 MT_BUG_ON(mt, !vals->seen_entry2); in run_check_rcu()
34974 MA_STATE(mas, test->mt, test->index, test->index); in rcu_slot_store_reader()
34991 static noinline void run_check_rcu_slot_store(struct maple_tree *mt) in run_check_rcu_slot_store() argument
34996 struct rcu_test_struct3 test = {.stop = false, .mt = mt}; in run_check_rcu_slot_store()
35004 mtree_store_range(mt, i * len, i * len + len - 1, in run_check_rcu_slot_store()
35008 mt_set_in_rcu(mt); in run_check_rcu_slot_store()
35009 MT_BUG_ON(mt, !mt_in_rcu(mt)); in run_check_rcu_slot_store()
35023 mtree_store_range(mt, --start, ++end, xa_mk_value(100), in run_check_rcu_slot_store()
35032 mt_validate(mt); in run_check_rcu_slot_store()
35036 void run_check_rcu_slowread(struct maple_tree *mt, struct rcu_test_struct *vals) in run_check_rcu_slowread() argument
35044 mt_set_in_rcu(mt); in run_check_rcu_slowread()
35045 MT_BUG_ON(mt, !mt_in_rcu(mt)); in run_check_rcu_slowread()
35062 mtree_store(mt, index, in run_check_rcu_slowread()
35073 MT_BUG_ON(mt, !vals->seen_entry2); in run_check_rcu_slowread()
35074 MT_BUG_ON(mt, !vals->seen_entry3); in run_check_rcu_slowread()
35075 MT_BUG_ON(mt, !vals->seen_both); in run_check_rcu_slowread()
35077 static noinline void __init check_rcu_simulated(struct maple_tree *mt) in check_rcu_simulated() argument
35083 MA_STATE(mas_writer, mt, 0, 0); in check_rcu_simulated()
35084 MA_STATE(mas_reader, mt, target, target); in check_rcu_simulated()
35088 mt_set_in_rcu(mt); in check_rcu_simulated()
35100 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35104 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35117 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35121 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35134 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35138 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35151 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35155 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35167 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35171 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35183 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35187 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35208 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35213 MT_BUG_ON(mt, mas_next(&mas_reader, ULONG_MAX) != xa_mk_value(val)); in check_rcu_simulated()
35227 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35232 MT_BUG_ON(mt, mas_prev(&mas_reader, 0) != xa_mk_value(val)); in check_rcu_simulated()
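
check_rcu_simulated() above interleaves a writer with a reader that, once the tree is in RCU mode, must always observe either the old or the new entry at the target index. A minimal sketch of that reader/writer split with the public API; sketch_rcu_read()/sketch_rcu_write() are made-up names and the stored value mirrors the test's xa_mk_value(target/10) convention.

        /* Sketch: RCU-mode lookup racing with a range rewrite.  The tree must
         * be switched to RCU mode (mt_set_in_rcu()) before readers start. */
        static void *sketch_rcu_read(struct maple_tree *mt, unsigned long target)
        {
                void *entry;
                MA_STATE(mas, mt, target, target);

                rcu_read_lock();
                entry = mas_walk(&mas);         /* sees either the old or new entry */
                rcu_read_unlock();
                return entry;
        }

        static void sketch_rcu_write(struct maple_tree *mt, unsigned long target)
        {
                /* mtree_store_range() takes the tree lock internally; with the
                 * tree in RCU mode, replaced nodes are freed after a grace period. */
                mtree_store_range(mt, target, target + 5,
                                  xa_mk_value(target / 10), GFP_KERNEL);
        }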
35238 static noinline void __init check_rcu_threaded(struct maple_tree *mt) in check_rcu_threaded() argument
35248 mtree_store_range(mt, i*10, i*10 + 5, in check_rcu_threaded()
35252 vals.mt = mt; in check_rcu_threaded()
35263 run_check_rcu(mt, &vals); in check_rcu_threaded()
35264 mtree_destroy(mt); in check_rcu_threaded()
35266 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35268 mtree_store_range(mt, i*10, i*10 + 5, in check_rcu_threaded()
35275 vals.mt = mt; in check_rcu_threaded()
35284 run_check_rcu(mt, &vals); in check_rcu_threaded()
35285 mtree_destroy(mt); in check_rcu_threaded()
35288 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35289 run_check_rcu_slot_store(mt); in check_rcu_threaded()
35290 mtree_destroy(mt); in check_rcu_threaded()
35293 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35294 rcu_stress(mt, true); in check_rcu_threaded()
35295 mtree_destroy(mt); in check_rcu_threaded()
35298 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35299 rcu_stress(mt, false); in check_rcu_threaded()
35300 mtree_destroy(mt); in check_rcu_threaded()
35303 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35305 mtree_store_range(mt, i*10, i*10 + 5, in check_rcu_threaded()
35312 vals.mt = mt; in check_rcu_threaded()
35333 run_check_rcu_slowread(mt, &vals); in check_rcu_threaded()
35387 static void check_dfs_preorder(struct maple_tree *mt) in check_dfs_preorder() argument
35391 MA_STATE(mas, mt, 0, 0); in check_dfs_preorder()
35398 check_seq(mt, max, false); in check_dfs_preorder()
35403 MT_BUG_ON(mt, count != e); in check_dfs_preorder()
35404 mtree_destroy(mt); in check_dfs_preorder()
35406 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_dfs_preorder()
35412 check_seq(mt, max, false); in check_dfs_preorder()
35418 MT_BUG_ON(mt, count != e); in check_dfs_preorder()
35419 mtree_destroy(mt); in check_dfs_preorder()
35421 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_dfs_preorder()
35424 check_rev_seq(mt, max, false); in check_dfs_preorder()
35430 MT_BUG_ON(mt, count != e); in check_dfs_preorder()
35431 mtree_destroy(mt); in check_dfs_preorder()
35433 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_dfs_preorder()
35441 MT_BUG_ON(mt, mas_is_err(&mas)); in check_dfs_preorder()
35455 static noinline void __init check_prealloc(struct maple_tree *mt) in check_prealloc() argument
35462 MA_STATE(mas, mt, 10, 20); in check_prealloc()
35466 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_prealloc()
35470 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35473 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35474 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35477 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35479 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35482 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35483 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35484 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35487 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35490 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35493 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35495 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1); in check_prealloc()
35498 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35501 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35503 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35506 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35508 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1); in check_prealloc()
35509 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35512 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35516 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35519 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35521 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1); in check_prealloc()
35523 MT_BUG_ON(mt, mas_allocated(&mas) != allocated); in check_prealloc()
35524 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35527 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35529 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35532 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35534 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35538 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35540 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35542 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35545 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35548 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35550 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35554 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35557 MT_BUG_ON(mt, allocated != 1 + height * 2); in check_prealloc()
35559 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35563 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) == 0); in check_prealloc()
35566 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35571 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35574 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35575 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35577 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35580 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) == 0); in check_prealloc()
35583 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
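
check_prealloc() above checks how many nodes mas_preallocate() sets aside for a pending write (on the order of 1 + height * 3 in the worst case) and that mas_destroy() gives them back. The usual caller-side pattern, sketched with the public API; sketch_prealloc_store() is a made-up name and the 10..20 range is illustrative.

        /* Sketch: preallocate nodes up front so the store itself cannot fail,
         * then commit with mas_store_prealloc() (which also releases any
         * unused nodes); on failure nothing was allocated. */
        static int sketch_prealloc_store(struct maple_tree *mt, void *ptr)
        {
                MA_STATE(mas, mt, 10, 20);      /* write will cover index 10..20 */

                mtree_lock(mt);
                if (mas_preallocate(&mas, ptr, GFP_KERNEL)) {
                        mtree_unlock(mt);
                        return -ENOMEM;
                }
                mas_store_prealloc(&mas, ptr);  /* consumes the preallocated nodes */
                mtree_unlock(mt);
                return 0;
        }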
35588 static noinline void __init check_spanning_write(struct maple_tree *mt) in check_spanning_write() argument
35591 MA_STATE(mas, mt, 1200, 2380); in check_spanning_write()
35594 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35596 mtree_lock(mt); in check_spanning_write()
35599 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35600 mtree_unlock(mt); in check_spanning_write()
35601 mtree_destroy(mt); in check_spanning_write()
35604 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35606 mtree_lock(mt); in check_spanning_write()
35610 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35611 mtree_unlock(mt); in check_spanning_write()
35612 mt_validate(mt); in check_spanning_write()
35613 mtree_destroy(mt); in check_spanning_write()
35616 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35618 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35621 mtree_lock(mt); in check_spanning_write()
35624 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35625 mtree_unlock(mt); in check_spanning_write()
35626 mtree_destroy(mt); in check_spanning_write()
35629 mt_init_flags(mt, 0); in check_spanning_write()
35631 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35634 mtree_lock(mt); in check_spanning_write()
35637 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35638 mtree_unlock(mt); in check_spanning_write()
35639 mtree_destroy(mt); in check_spanning_write()
35642 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35644 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35647 mtree_lock(mt); in check_spanning_write()
35650 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35651 mtree_unlock(mt); in check_spanning_write()
35652 mtree_destroy(mt); in check_spanning_write()
35655 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35657 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35660 mtree_lock(mt); in check_spanning_write()
35663 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35664 mtree_unlock(mt); in check_spanning_write()
35665 mtree_destroy(mt); in check_spanning_write()
35668 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35670 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35673 mtree_lock(mt); in check_spanning_write()
35676 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35677 mtree_unlock(mt); in check_spanning_write()
35678 mtree_destroy(mt); in check_spanning_write()
35684 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35686 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35689 mtree_lock(mt); in check_spanning_write()
35692 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35693 mtree_unlock(mt); in check_spanning_write()
35694 mtree_destroy(mt); in check_spanning_write()
35700 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35702 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35704 mtree_lock(mt); in check_spanning_write()
35707 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35708 mtree_unlock(mt); in check_spanning_write()
35709 mtree_destroy(mt); in check_spanning_write()
35712 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35714 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35715 mtree_lock(mt); in check_spanning_write()
35726 MT_BUG_ON(mt, (mas_data_end(&mas)) != mt_slot_count(mas.node) - 1); in check_spanning_write()
35729 mtree_unlock(mt); in check_spanning_write()
35730 mtree_destroy(mt); in check_spanning_write()
35733 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35735 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35737 mtree_lock(mt); in check_spanning_write()
35740 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35741 mtree_unlock(mt); in check_spanning_write()
35742 mtree_destroy(mt); in check_spanning_write()
35744 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35746 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35748 mtree_lock(mt); in check_spanning_write()
35751 mtree_unlock(mt); in check_spanning_write()
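
The check_spanning_write() matches set the maple state over a range that crosses node boundaries (1200-2380 in the test), store NULL across it under the tree lock, and confirm mas_walk() then finds nothing inside the hole. A minimal sketch of that spanning store; sketch_spanning_store() is a made-up name and it assumes the tree already holds entries across the range.

        /* Sketch: wipe a range that spans several nodes in one store. */
        static void sketch_spanning_store(struct maple_tree *mt)
        {
                MA_STATE(mas, mt, 1200, 2380);  /* spans multiple leaf nodes */

                mtree_lock(mt);
                mas_store_gfp(&mas, NULL, GFP_KERNEL);  /* single spanning write */
                mas_set(&mas, 1205);                    /* re-check inside the hole */
                MT_BUG_ON(mt, mas_walk(&mas) != NULL);
                mtree_unlock(mt);
        }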
35756 static noinline void __init check_null_expand(struct maple_tree *mt) in check_null_expand() argument
35760 MA_STATE(mas, mt, 959, 959); in check_null_expand()
35763 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_null_expand()
35770 MT_BUG_ON(mt, mtree_load(mt, 963) != NULL); in check_null_expand()
35771 MT_BUG_ON(mt, data_end != mas_data_end(&mas)); in check_null_expand()
35779 MT_BUG_ON(mt, mtree_load(mt, 884) != NULL); in check_null_expand()
35780 MT_BUG_ON(mt, mtree_load(mt, 889) != NULL); in check_null_expand()
35782 MT_BUG_ON(mt, data_end != mas_data_end(&mas)); in check_null_expand()
35791 MT_BUG_ON(mt, mtree_load(mt, 899) != NULL); in check_null_expand()
35792 MT_BUG_ON(mt, mtree_load(mt, 900) != NULL); in check_null_expand()
35793 MT_BUG_ON(mt, mtree_load(mt, 905) != NULL); in check_null_expand()
35794 MT_BUG_ON(mt, mtree_load(mt, 906) != NULL); in check_null_expand()
35796 MT_BUG_ON(mt, data_end - 2 != mas_data_end(&mas)); in check_null_expand()
35805 MT_BUG_ON(mt, mtree_load(mt, 809) != NULL); in check_null_expand()
35806 MT_BUG_ON(mt, mtree_load(mt, 810) != NULL); in check_null_expand()
35807 MT_BUG_ON(mt, mtree_load(mt, 825) != NULL); in check_null_expand()
35808 MT_BUG_ON(mt, mtree_load(mt, 826) != NULL); in check_null_expand()
35810 MT_BUG_ON(mt, data_end - 4 != mas_data_end(&mas)); in check_null_expand()
35817 static noinline void __init check_nomem(struct maple_tree *mt) in check_nomem() argument
35819 MA_STATE(ms, mt, 1, 1); in check_nomem()
35821 MT_BUG_ON(mt, !mtree_empty(mt)); in check_nomem()
35826 MT_BUG_ON(mt, mtree_insert(mt, 1, &ms, GFP_ATOMIC) != -ENOMEM); in check_nomem()
35828 MT_BUG_ON(mt, mtree_insert(mt, 0, &ms, GFP_ATOMIC) != 0); in check_nomem()
35838 mtree_lock(mt); in check_nomem()
35840 MT_BUG_ON(mt, ms.node != MA_ERROR(-ENOMEM)); in check_nomem()
35842 MT_BUG_ON(mt, ms.status != ma_start); in check_nomem()
35843 mtree_unlock(mt); in check_nomem()
35844 MT_BUG_ON(mt, mtree_insert(mt, 2, mt, GFP_KERNEL) != 0); in check_nomem()
35845 mtree_lock(mt); in check_nomem()
35848 mtree_unlock(mt); in check_nomem()
35849 mtree_destroy(mt); in check_nomem()
35852 static noinline void __init check_locky(struct maple_tree *mt) in check_locky() argument
35854 MA_STATE(ms, mt, 2, 2); in check_locky()
35855 MA_STATE(reader, mt, 2, 2); in check_locky()
35858 mt_set_in_rcu(mt); in check_locky()
35864 mt_clear_in_rcu(mt); in check_locky()
36013 * @mt: The tree to build.
36021 static __init int build_full_tree(struct maple_tree *mt, unsigned int flags, in build_full_tree() argument
36024 MA_STATE(mas, mt, 0, 0); in build_full_tree()
36029 mt_init_flags(mt, flags); in build_full_tree()
36030 mtree_insert_range(mt, 0, ULONG_MAX, xa_mk_value(5), GFP_KERNEL); in build_full_tree()
36032 mtree_lock(mt); in build_full_tree()
36036 if (mt_height(mt) < height) { in build_full_tree()
36067 mtree_unlock(mt); in build_full_tree()
36069 MT_BUG_ON(mt, mt_height(mt) != height); in build_full_tree()
36070 /* pr_info("height:%u number of elements:%d\n", mt_height(mt), cnt); */ in build_full_tree()
36074 static noinline void __init check_mtree_dup(struct maple_tree *mt) in check_mtree_dup() argument
36081 mt_init_flags(mt, 0); in check_mtree_dup()
36082 mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL); in check_mtree_dup()
36083 ret = mtree_dup(mt, &new, GFP_KERNEL); in check_mtree_dup()
36086 if (compare_tree(mt, &new)) in check_mtree_dup()
36089 mtree_destroy(mt); in check_mtree_dup()
36093 mt_init_flags(mt, 0); in check_mtree_dup()
36095 ret = mtree_dup(mt, &new, GFP_KERNEL); in check_mtree_dup()
36097 mtree_destroy(mt); in check_mtree_dup()
36101 mt_init_flags(mt, 0); in check_mtree_dup()
36104 ret = mtree_dup(mt, &new, GFP_KERNEL); in check_mtree_dup()
36106 mtree_destroy(mt); in check_mtree_dup()
36111 ret = build_full_tree(mt, 0, i); in check_mtree_dup()
36112 MT_BUG_ON(mt, ret); in check_mtree_dup()
36115 ret = mtree_dup(mt, &new, GFP_KERNEL); in check_mtree_dup()
36118 if (compare_tree(mt, &new)) in check_mtree_dup()
36121 mtree_destroy(mt); in check_mtree_dup()
36126 ret = build_full_tree(mt, MT_FLAGS_ALLOC_RANGE, i); in check_mtree_dup()
36127 MT_BUG_ON(mt, ret); in check_mtree_dup()
36130 ret = mtree_dup(mt, &new, GFP_KERNEL); in check_mtree_dup()
36133 if (compare_tree(mt, &new)) in check_mtree_dup()
36136 mtree_destroy(mt); in check_mtree_dup()
36143 mt_init_flags(mt, 0); in check_mtree_dup()
36146 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_mtree_dup()
36151 mtree_store_range(mt, j * 10, j * 10 + 5, in check_mtree_dup()
36155 ret = mtree_dup(mt, &new, GFP_KERNEL); in check_mtree_dup()
36158 if (compare_tree(mt, &new)) in check_mtree_dup()
36161 mtree_destroy(mt); in check_mtree_dup()
36166 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_mtree_dup()
36168 mtree_store_range(mt, j * 10, j * 10 + 5, in check_mtree_dup()
36175 ret = mtree_dup(mt, &new, GFP_NOWAIT); in check_mtree_dup()
36178 mtree_destroy(mt); in check_mtree_dup()
36184 mt_init_flags(mt, 0); in check_mtree_dup()
36187 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_mtree_dup()
36192 mtree_store_range(mt, j * 10, j * 10 + 5, in check_mtree_dup()
36203 ret = mtree_dup(mt, &new, GFP_NOWAIT); in check_mtree_dup()
36208 mtree_destroy(mt); in check_mtree_dup()
36213 if (compare_tree(mt, &new)) in check_mtree_dup()
36216 mtree_destroy(mt); in check_mtree_dup()