Lines matching refs: mt
29 #define RCU_MT_BUG_ON(test, y) {if (y) { test->stop = true; } MT_BUG_ON(test->mt, y); }
32 struct maple_tree *mt; member
49 struct maple_tree *mt; member
91 static noinline void __init check_new_node(struct maple_tree *mt) in check_new_node() argument
99 MA_STATE(mas, mt, 0, 0); in check_new_node()
104 mtree_lock(mt); in check_new_node()
109 MT_BUG_ON(mt, mas_alloc_req(&mas) != 3); in check_new_node()
111 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM)); in check_new_node()
112 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
114 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
116 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
117 MT_BUG_ON(mt, mn == NULL); in check_new_node()
118 MT_BUG_ON(mt, mas.alloc == NULL); in check_new_node()
119 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL); in check_new_node()
122 mtree_unlock(mt); in check_new_node()
126 mtree_lock(mt); in check_new_node()
130 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1); in check_new_node()
133 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
136 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
137 MT_BUG_ON(mt, mn == NULL); in check_new_node()
138 MT_BUG_ON(mt, mn->slot[0] != NULL); in check_new_node()
139 MT_BUG_ON(mt, mn->slot[1] != NULL); in check_new_node()
140 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
151 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
153 MT_BUG_ON(mt, mas_alloc_req(&mas) != 0); in check_new_node()
155 MT_BUG_ON(mt, mas.alloc == NULL); in check_new_node()
156 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL); in check_new_node()
157 MT_BUG_ON(mt, mas.alloc->slot[1] == NULL); in check_new_node()
159 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
165 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1); in check_new_node()
168 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
169 MT_BUG_ON(mt, mas_allocated(&mas) != 1); in check_new_node()
172 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
173 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
174 MT_BUG_ON(mt, mn == NULL); in check_new_node()
175 MT_BUG_ON(mt, mn->slot[0] != NULL); in check_new_node()
176 MT_BUG_ON(mt, mn->slot[1] != NULL); in check_new_node()
177 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
179 MT_BUG_ON(mt, mas_allocated(&mas) != 1); in check_new_node()
180 MT_BUG_ON(mt, mas.alloc->node_count); in check_new_node()
183 MT_BUG_ON(mt, mas_alloc_req(&mas) != 2); in check_new_node()
185 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
186 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
187 MT_BUG_ON(mt, mas.alloc == NULL); in check_new_node()
188 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL); in check_new_node()
189 MT_BUG_ON(mt, mas.alloc->slot[1] == NULL); in check_new_node()
192 MT_BUG_ON(mt, mas_allocated(&mas) != i); in check_new_node()
193 MT_BUG_ON(mt, !mn); in check_new_node()
194 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
201 MT_BUG_ON(mt, mas_alloc_req(&mas) != total); in check_new_node()
203 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
223 MT_BUG_ON(mt, mas.alloc->node_count != e); in check_new_node()
225 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
226 MT_BUG_ON(mt, mas_allocated(&mas) != i - 1); in check_new_node()
227 MT_BUG_ON(mt, !mn); in check_new_node()
236 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
239 MT_BUG_ON(mt, mas_allocated(&mas) != j - 1); in check_new_node()
240 MT_BUG_ON(mt, !mn); in check_new_node()
241 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
243 MT_BUG_ON(mt, mas_allocated(&mas) != j); in check_new_node()
245 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
246 MT_BUG_ON(mt, mas_allocated(&mas) != j - 1); in check_new_node()
250 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
254 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
256 MT_BUG_ON(mt, mas_allocated(&mas) != i - j); in check_new_node()
258 MT_BUG_ON(mt, mas_allocated(&mas) != i - j - 1); in check_new_node()
264 MT_BUG_ON(mt, mas_allocated(&mas) != i - j); in check_new_node()
266 MT_BUG_ON(mt, mas_allocated(&mas) != i); in check_new_node()
268 MT_BUG_ON(mt, mas_allocated(&mas) != i - j); in check_new_node()
270 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
273 MT_BUG_ON(mt, mas_allocated(&mas) != i - j - 1); in check_new_node()
275 MT_BUG_ON(mt, mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
284 MT_BUG_ON(mt, !mas.alloc); in check_new_node()
290 MT_BUG_ON(mt, !smn->slot[j]); in check_new_node()
296 MT_BUG_ON(mt, mas_allocated(&mas) != total); in check_new_node()
299 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
303 MT_BUG_ON(mt, mas_allocated(&mas) != i); /* check request filled */ in check_new_node()
306 MT_BUG_ON(mt, mn == NULL); in check_new_node()
307 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
311 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
315 MA_STATE(mas2, mt, 0, 0); in check_new_node()
318 MT_BUG_ON(mt, mas_allocated(&mas) != i); /* check request filled */ in check_new_node()
321 MT_BUG_ON(mt, mn == NULL); in check_new_node()
322 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
324 MT_BUG_ON(mt, mas_allocated(&mas2) != j); in check_new_node()
326 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
327 MT_BUG_ON(mt, mas_allocated(&mas2) != i); in check_new_node()
330 MT_BUG_ON(mt, mas_allocated(&mas2) != j); in check_new_node()
332 MT_BUG_ON(mt, mn == NULL); in check_new_node()
333 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
337 MT_BUG_ON(mt, mas_allocated(&mas2) != 0); in check_new_node()
341 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
343 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM)); in check_new_node()
344 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
345 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1); in check_new_node()
346 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS); in check_new_node()
349 MT_BUG_ON(mt, mn == NULL); in check_new_node()
350 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
351 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS); in check_new_node()
352 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS - 1); in check_new_node()
355 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1); in check_new_node()
356 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS); in check_new_node()
360 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1); in check_new_node()
361 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM)); in check_new_node()
362 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
363 MT_BUG_ON(mt, mas_alloc_req(&mas)); in check_new_node()
364 MT_BUG_ON(mt, mas.alloc->node_count != 1); in check_new_node()
365 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 2); in check_new_node()
367 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
368 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1); in check_new_node()
369 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS); in check_new_node()
371 MT_BUG_ON(mt, mas.alloc->node_count != 1); in check_new_node()
372 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 2); in check_new_node()
374 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
379 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
383 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
440 MT_BUG_ON(mt, mas_allocated(&mas) != 5); in check_new_node()
445 MT_BUG_ON(mt, mas_allocated(&mas) != 10); in check_new_node()
451 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS - 1); in check_new_node()
456 MT_BUG_ON(mt, mas_allocated(&mas) != 10 + MAPLE_ALLOC_SLOTS - 1); in check_new_node()
459 mtree_unlock(mt); in check_new_node()
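The check_new_node() lines above exercise the internal node allocator (mas_alloc_req(), mas_nomem(), mas_allocated(), the mas.alloc slots) under mtree_lock(). For context, a minimal sketch of the retry pattern this machinery backs, using only the public mas_store()/mas_nomem() calls that also appear in this file; the tree, range, and value are illustrative, not taken from the test:

#include <linux/maple_tree.h>
#include <linux/xarray.h>

/* Sketch only: store with the advanced API, letting mas_nomem() refill nodes. */
static void store_retry_sketch(struct maple_tree *tree)
{
        MA_STATE(mas, tree, 10, 20);            /* illustrative range */

        mas_lock(&mas);
retry:
        mas_store(&mas, xa_mk_value(42));       /* sets -ENOMEM state if no nodes are reserved */
        if (mas_nomem(&mas, GFP_KERNEL))        /* allocates the missing nodes, asks for a retry */
                goto retry;
        mas_unlock(&mas);
}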
465 static noinline void __init check_erase(struct maple_tree *mt, unsigned long index, in check_erase() argument
468 MT_BUG_ON(mt, mtree_test_erase(mt, index) != ptr); in check_erase()
471 #define erase_check_load(mt, i) check_load(mt, set[i], entry[i%2]) argument
472 #define erase_check_insert(mt, i) check_insert(mt, set[i], entry[i%2]) argument
473 #define erase_check_erase(mt, i) check_erase(mt, set[i], entry[i%2]) argument
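check_erase() and the erase_check_*() wrappers above assert that erasing returns the entry that was removed. A minimal sketch of that contract with the public API (index and value are made up):

static void erase_sketch(void)
{
        struct maple_tree tree;

        mt_init(&tree);
        mtree_insert(&tree, 5, xa_mk_value(5), GFP_KERNEL);
        WARN_ON(mtree_erase(&tree, 5) != xa_mk_value(5));   /* erase hands back the old entry */
        WARN_ON(mtree_load(&tree, 5) != NULL);               /* the index reads back empty */
        mtree_destroy(&tree);
}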
475 static noinline void __init check_erase_testset(struct maple_tree *mt) in check_erase_testset() argument
493 void *entry[2] = { ptr, mt }; in check_erase_testset()
498 mt_set_in_rcu(mt); in check_erase_testset()
500 erase_check_insert(mt, i); in check_erase_testset()
502 erase_check_load(mt, i); in check_erase_testset()
505 erase_check_erase(mt, 1); in check_erase_testset()
506 erase_check_load(mt, 0); in check_erase_testset()
507 check_load(mt, set[1], NULL); in check_erase_testset()
509 erase_check_load(mt, i); in check_erase_testset()
512 erase_check_erase(mt, 2); in check_erase_testset()
513 erase_check_load(mt, 0); in check_erase_testset()
514 check_load(mt, set[1], NULL); in check_erase_testset()
515 check_load(mt, set[2], NULL); in check_erase_testset()
517 erase_check_insert(mt, 1); in check_erase_testset()
518 erase_check_insert(mt, 2); in check_erase_testset()
521 erase_check_load(mt, i); in check_erase_testset()
524 erase_check_load(mt, 3); in check_erase_testset()
525 erase_check_erase(mt, 1); in check_erase_testset()
526 erase_check_load(mt, 0); in check_erase_testset()
527 check_load(mt, set[1], NULL); in check_erase_testset()
529 erase_check_load(mt, i); in check_erase_testset()
535 root_node = mt->ma_root; in check_erase_testset()
536 erase_check_insert(mt, 1); in check_erase_testset()
538 erase_check_load(mt, 0); in check_erase_testset()
539 check_load(mt, 5016, NULL); in check_erase_testset()
540 erase_check_load(mt, 1); in check_erase_testset()
541 check_load(mt, 5013, NULL); in check_erase_testset()
542 erase_check_load(mt, 2); in check_erase_testset()
543 check_load(mt, 5018, NULL); in check_erase_testset()
544 erase_check_load(mt, 3); in check_erase_testset()
546 erase_check_erase(mt, 2); /* erase 5017 to check append */ in check_erase_testset()
547 erase_check_load(mt, 0); in check_erase_testset()
548 check_load(mt, 5016, NULL); in check_erase_testset()
549 erase_check_load(mt, 1); in check_erase_testset()
550 check_load(mt, 5013, NULL); in check_erase_testset()
551 check_load(mt, set[2], NULL); in check_erase_testset()
552 check_load(mt, 5018, NULL); in check_erase_testset()
554 erase_check_load(mt, 3); in check_erase_testset()
556 root_node = mt->ma_root; in check_erase_testset()
557 erase_check_insert(mt, 2); in check_erase_testset()
559 erase_check_load(mt, 0); in check_erase_testset()
560 check_load(mt, 5016, NULL); in check_erase_testset()
561 erase_check_load(mt, 1); in check_erase_testset()
562 check_load(mt, 5013, NULL); in check_erase_testset()
563 erase_check_load(mt, 2); in check_erase_testset()
564 check_load(mt, 5018, NULL); in check_erase_testset()
565 erase_check_load(mt, 3); in check_erase_testset()
568 erase_check_erase(mt, 2); /* erase 5017 to check append */ in check_erase_testset()
569 erase_check_load(mt, 0); in check_erase_testset()
570 check_load(mt, 5016, NULL); in check_erase_testset()
571 check_load(mt, set[2], NULL); in check_erase_testset()
572 erase_check_erase(mt, 0); /* erase 5015 to check append */ in check_erase_testset()
573 check_load(mt, set[0], NULL); in check_erase_testset()
574 check_load(mt, 5016, NULL); in check_erase_testset()
575 erase_check_insert(mt, 4); /* 1000 < Should not split. */ in check_erase_testset()
576 check_load(mt, set[0], NULL); in check_erase_testset()
577 check_load(mt, 5016, NULL); in check_erase_testset()
578 erase_check_load(mt, 1); in check_erase_testset()
579 check_load(mt, 5013, NULL); in check_erase_testset()
580 check_load(mt, set[2], NULL); in check_erase_testset()
581 check_load(mt, 5018, NULL); in check_erase_testset()
582 erase_check_load(mt, 4); in check_erase_testset()
583 check_load(mt, 999, NULL); in check_erase_testset()
584 check_load(mt, 1001, NULL); in check_erase_testset()
585 erase_check_load(mt, 4); in check_erase_testset()
586 if (mt_in_rcu(mt)) in check_erase_testset()
587 MT_BUG_ON(mt, root_node == mt->ma_root); in check_erase_testset()
589 MT_BUG_ON(mt, root_node != mt->ma_root); in check_erase_testset()
592 MT_BUG_ON(mt, !mte_is_leaf(mt->ma_root)); in check_erase_testset()
596 erase_check_insert(mt, 0); in check_erase_testset()
597 erase_check_insert(mt, 2); in check_erase_testset()
600 erase_check_insert(mt, i); in check_erase_testset()
602 erase_check_load(mt, j); in check_erase_testset()
605 erase_check_erase(mt, 14); /*6015 */ in check_erase_testset()
608 check_load(mt, set[i], NULL); in check_erase_testset()
610 erase_check_load(mt, i); in check_erase_testset()
612 erase_check_erase(mt, 16); /*7002 */ in check_erase_testset()
615 check_load(mt, set[i], NULL); in check_erase_testset()
617 erase_check_load(mt, i); in check_erase_testset()
622 erase_check_erase(mt, 13); /*6012 */ in check_erase_testset()
625 check_load(mt, set[i], NULL); in check_erase_testset()
627 erase_check_load(mt, i); in check_erase_testset()
630 erase_check_erase(mt, 15); /*7003 */ in check_erase_testset()
633 check_load(mt, set[i], NULL); in check_erase_testset()
635 erase_check_load(mt, i); in check_erase_testset()
639 erase_check_erase(mt, 17); /*7008 *should* cause coalesce. */ in check_erase_testset()
642 check_load(mt, set[i], NULL); in check_erase_testset()
644 erase_check_load(mt, i); in check_erase_testset()
647 erase_check_erase(mt, 18); /*7012 */ in check_erase_testset()
650 check_load(mt, set[i], NULL); in check_erase_testset()
652 erase_check_load(mt, i); in check_erase_testset()
656 erase_check_erase(mt, 19); /*7015 */ in check_erase_testset()
659 check_load(mt, set[i], NULL); in check_erase_testset()
661 erase_check_load(mt, i); in check_erase_testset()
664 erase_check_erase(mt, 20); /*8003 */ in check_erase_testset()
667 check_load(mt, set[i], NULL); in check_erase_testset()
669 erase_check_load(mt, i); in check_erase_testset()
672 erase_check_erase(mt, 21); /*8002 */ in check_erase_testset()
675 check_load(mt, set[i], NULL); in check_erase_testset()
677 erase_check_load(mt, i); in check_erase_testset()
681 erase_check_erase(mt, 22); /*8008 */ in check_erase_testset()
684 check_load(mt, set[i], NULL); in check_erase_testset()
686 erase_check_load(mt, i); in check_erase_testset()
689 erase_check_erase(mt, i); in check_erase_testset()
693 check_load(mt, set[i], NULL); in check_erase_testset()
695 erase_check_load(mt, i); in check_erase_testset()
701 erase_check_insert(mt, i); in check_erase_testset()
705 erase_check_erase(mt, i); in check_erase_testset()
708 erase_check_load(mt, j); in check_erase_testset()
710 check_load(mt, set[j], NULL); in check_erase_testset()
715 erase_check_erase(mt, i); in check_erase_testset()
718 erase_check_load(mt, j); in check_erase_testset()
720 check_load(mt, set[j], NULL); in check_erase_testset()
723 erase_check_insert(mt, 8); in check_erase_testset()
724 erase_check_insert(mt, 9); in check_erase_testset()
725 erase_check_erase(mt, 8); in check_erase_testset()
732 #define erase_check_store_range(mt, a, i, ptr) mtree_test_store_range(mt, \ argument
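erase_check_store_range() above is a thin wrapper around mtree_test_store_range(). A minimal sketch of the underlying operation, storing over a range and then clearing part of it with a NULL store (ranges and values are illustrative):

static void store_range_sketch(void)
{
        struct maple_tree tree;

        mt_init(&tree);
        mtree_store_range(&tree, 100, 199, xa_mk_value(1), GFP_KERNEL);
        mtree_store_range(&tree, 150, 160, NULL, GFP_KERNEL);  /* punch a hole in the range */
        WARN_ON(mtree_load(&tree, 120) != xa_mk_value(1));
        WARN_ON(mtree_load(&tree, 155) != NULL);
        mtree_destroy(&tree);
}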
979 static noinline void __init check_erase2_testset(struct maple_tree *mt, in check_erase2_testset() argument
988 MA_STATE(mas, mt, 0, 0); in check_erase2_testset()
995 MA_STATE(mas_start, mt, set[i+1], set[i+1]); in check_erase2_testset()
996 MA_STATE(mas_end, mt, set[i+2], set[i+2]); in check_erase2_testset()
1023 erase_check_store_range(mt, set, i + 1, value); in check_erase2_testset()
1051 erase_check_store_range(mt, set, i + 1, value); in check_erase2_testset()
1056 check_erase(mt, set[i+1], xa_mk_value(set[i+1])); in check_erase2_testset()
1060 mt_validate(mt); in check_erase2_testset()
1062 MT_BUG_ON(mt, !mt_height(mt)); in check_erase2_testset()
1064 mt_dump(mt, mt_dump_hex); in check_erase2_testset()
1072 mt_for_each(mt, foo, addr, ULONG_MAX) { in check_erase2_testset()
1085 MT_BUG_ON(mt, check != entry_count); in check_erase2_testset()
1098 MT_BUG_ON(mt, 1); in check_erase2_testset()
1113 mt_validate(mt); in check_erase2_testset()
1116 MT_BUG_ON(mt, check != entry_count); in check_erase2_testset()
1118 MT_BUG_ON(mt, mtree_load(mas.tree, 0) != NULL); in check_erase2_testset()
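The entry-counting loop in check_erase2_testset() (source line 1072 above) iterates with mt_for_each(). A minimal sketch of that iteration over an already populated tree:

static unsigned long count_entries_sketch(struct maple_tree *tree)
{
        unsigned long index = 0;        /* advanced past each entry by the iterator */
        unsigned long count = 0;
        void *entry;

        mt_for_each(tree, entry, index, ULONG_MAX)
                count++;
        return count;
}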
1124 static noinline void __init check_erase2_sets(struct maple_tree *mt) in check_erase2_sets() argument
33896 MA_STATE(mas, mt, 0, 0); in check_erase2_sets()
33899 check_erase2_testset(mt, set, ARRAY_SIZE(set)); in check_erase2_sets()
33901 mtree_destroy(mt); in check_erase2_sets()
33903 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33904 check_erase2_testset(mt, set2, ARRAY_SIZE(set2)); in check_erase2_sets()
33906 MT_BUG_ON(mt, !!mt_find(mt, &start, 140735933906943UL)); in check_erase2_sets()
33907 mtree_destroy(mt); in check_erase2_sets()
33910 mt_init_flags(mt, 0); in check_erase2_sets()
33911 check_erase2_testset(mt, set3, ARRAY_SIZE(set3)); in check_erase2_sets()
33913 mtree_destroy(mt); in check_erase2_sets()
33915 mt_init_flags(mt, 0); in check_erase2_sets()
33916 check_erase2_testset(mt, set4, ARRAY_SIZE(set4)); in check_erase2_sets()
33924 mtree_destroy(mt); in check_erase2_sets()
33926 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33928 check_erase2_testset(mt, set5, ARRAY_SIZE(set5)); in check_erase2_sets()
33931 mtree_destroy(mt); in check_erase2_sets()
33933 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33934 check_erase2_testset(mt, set6, ARRAY_SIZE(set6)); in check_erase2_sets()
33936 mtree_destroy(mt); in check_erase2_sets()
33938 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33939 check_erase2_testset(mt, set7, ARRAY_SIZE(set7)); in check_erase2_sets()
33941 mtree_destroy(mt); in check_erase2_sets()
33943 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33944 check_erase2_testset(mt, set8, ARRAY_SIZE(set8)); in check_erase2_sets()
33946 mtree_destroy(mt); in check_erase2_sets()
33948 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33949 check_erase2_testset(mt, set9, ARRAY_SIZE(set9)); in check_erase2_sets()
33951 mtree_destroy(mt); in check_erase2_sets()
33953 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33954 check_erase2_testset(mt, set10, ARRAY_SIZE(set10)); in check_erase2_sets()
33956 mtree_destroy(mt); in check_erase2_sets()
33959 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33960 check_erase2_testset(mt, set11, ARRAY_SIZE(set11)); in check_erase2_sets()
33963 MT_BUG_ON(mt, mas.last != 140014592573439); in check_erase2_sets()
33964 mtree_destroy(mt); in check_erase2_sets()
33967 mas.tree = mt; in check_erase2_sets()
33970 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33971 check_erase2_testset(mt, set12, ARRAY_SIZE(set12)); in check_erase2_sets()
33979 mtree_destroy(mt); in check_erase2_sets()
33982 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33983 check_erase2_testset(mt, set13, ARRAY_SIZE(set13)); in check_erase2_sets()
33984 mtree_erase(mt, 140373516443648); in check_erase2_sets()
33988 mtree_destroy(mt); in check_erase2_sets()
33989 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33990 check_erase2_testset(mt, set14, ARRAY_SIZE(set14)); in check_erase2_sets()
33992 mtree_destroy(mt); in check_erase2_sets()
33994 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33995 check_erase2_testset(mt, set15, ARRAY_SIZE(set15)); in check_erase2_sets()
33997 mtree_destroy(mt); in check_erase2_sets()
34002 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34003 check_erase2_testset(mt, set16, ARRAY_SIZE(set16)); in check_erase2_sets()
34006 MT_BUG_ON(mt, mas.last != 139921865547775); in check_erase2_sets()
34008 mtree_destroy(mt); in check_erase2_sets()
34017 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34018 check_erase2_testset(mt, set17, ARRAY_SIZE(set17)); in check_erase2_sets()
34021 MT_BUG_ON(mt, mas.last != 139953197322239); in check_erase2_sets()
34024 mtree_destroy(mt); in check_erase2_sets()
34033 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34034 check_erase2_testset(mt, set18, ARRAY_SIZE(set18)); in check_erase2_sets()
34037 MT_BUG_ON(mt, mas.last != 140222968475647); in check_erase2_sets()
34040 mtree_destroy(mt); in check_erase2_sets()
34051 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34052 check_erase2_testset(mt, set19, ARRAY_SIZE(set19)); in check_erase2_sets()
34056 MT_BUG_ON(mt, entry != xa_mk_value(140656779083776)); in check_erase2_sets()
34058 MT_BUG_ON(mt, entry != xa_mk_value(140656766251008)); in check_erase2_sets()
34060 mtree_destroy(mt); in check_erase2_sets()
34067 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34068 check_erase2_testset(mt, set20, ARRAY_SIZE(set20)); in check_erase2_sets()
34070 check_load(mt, 94849009414144, NULL); in check_erase2_sets()
34072 mtree_destroy(mt); in check_erase2_sets()
34075 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34076 check_erase2_testset(mt, set21, ARRAY_SIZE(set21)); in check_erase2_sets()
34078 mt_validate(mt); in check_erase2_sets()
34080 mtree_destroy(mt); in check_erase2_sets()
34083 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34084 check_erase2_testset(mt, set22, ARRAY_SIZE(set22)); in check_erase2_sets()
34086 mt_validate(mt); in check_erase2_sets()
34087 ptr = mtree_load(mt, 140551363362816); in check_erase2_sets()
34088 MT_BUG_ON(mt, ptr == mtree_load(mt, 140551363420159)); in check_erase2_sets()
34090 mtree_destroy(mt); in check_erase2_sets()
34093 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34094 check_erase2_testset(mt, set23, ARRAY_SIZE(set23)); in check_erase2_sets()
34097 mt_validate(mt); in check_erase2_sets()
34098 mtree_destroy(mt); in check_erase2_sets()
34102 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34103 check_erase2_testset(mt, set24, ARRAY_SIZE(set24)); in check_erase2_sets()
34106 mt_validate(mt); in check_erase2_sets()
34107 mtree_destroy(mt); in check_erase2_sets()
34110 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34111 check_erase2_testset(mt, set25, ARRAY_SIZE(set25)); in check_erase2_sets()
34114 mt_validate(mt); in check_erase2_sets()
34115 mtree_destroy(mt); in check_erase2_sets()
34120 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34121 check_erase2_testset(mt, set26, ARRAY_SIZE(set26)); in check_erase2_sets()
34124 MT_BUG_ON(mt, mas.last != 140109040959487); in check_erase2_sets()
34126 mt_validate(mt); in check_erase2_sets()
34127 mtree_destroy(mt); in check_erase2_sets()
34132 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34133 check_erase2_testset(mt, set27, ARRAY_SIZE(set27)); in check_erase2_sets()
34135 MT_BUG_ON(mt, 0 != mtree_load(mt, 140415537422336)); in check_erase2_sets()
34137 mt_validate(mt); in check_erase2_sets()
34138 mtree_destroy(mt); in check_erase2_sets()
34142 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34143 check_erase2_testset(mt, set28, ARRAY_SIZE(set28)); in check_erase2_sets()
34148 MT_BUG_ON(mt, mas.index != 139918401601536); in check_erase2_sets()
34150 mt_validate(mt); in check_erase2_sets()
34151 mtree_destroy(mt); in check_erase2_sets()
34158 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34159 check_erase2_testset(mt, set29, ARRAY_SIZE(set29)); in check_erase2_sets()
34162 mt_validate(mt); in check_erase2_sets()
34163 mtree_destroy(mt); in check_erase2_sets()
34171 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34172 check_erase2_testset(mt, set30, ARRAY_SIZE(set30)); in check_erase2_sets()
34175 mt_validate(mt); in check_erase2_sets()
34176 mtree_destroy(mt); in check_erase2_sets()
34184 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34185 check_erase2_testset(mt, set31, ARRAY_SIZE(set31)); in check_erase2_sets()
34188 mt_validate(mt); in check_erase2_sets()
34189 mtree_destroy(mt); in check_erase2_sets()
34193 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34194 check_erase2_testset(mt, set32, ARRAY_SIZE(set32)); in check_erase2_sets()
34197 mt_validate(mt); in check_erase2_sets()
34198 mtree_destroy(mt); in check_erase2_sets()
34213 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34214 check_erase2_testset(mt, set33, ARRAY_SIZE(set33)); in check_erase2_sets()
34217 MT_BUG_ON(mt, mas.last != 140583003750399); in check_erase2_sets()
34219 mt_validate(mt); in check_erase2_sets()
34220 mtree_destroy(mt); in check_erase2_sets()
34228 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34229 check_erase2_testset(mt, set34, ARRAY_SIZE(set34)); in check_erase2_sets()
34232 mt_validate(mt); in check_erase2_sets()
34233 mtree_destroy(mt); in check_erase2_sets()
34238 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34239 check_erase2_testset(mt, set35, ARRAY_SIZE(set35)); in check_erase2_sets()
34242 mt_validate(mt); in check_erase2_sets()
34243 mtree_destroy(mt); in check_erase2_sets()
34248 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34249 check_erase2_testset(mt, set36, ARRAY_SIZE(set36)); in check_erase2_sets()
34252 mt_validate(mt); in check_erase2_sets()
34253 mtree_destroy(mt); in check_erase2_sets()
34256 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34257 check_erase2_testset(mt, set37, ARRAY_SIZE(set37)); in check_erase2_sets()
34259 MT_BUG_ON(mt, 0 != mtree_load(mt, 94637033459712)); in check_erase2_sets()
34260 mt_validate(mt); in check_erase2_sets()
34261 mtree_destroy(mt); in check_erase2_sets()
34264 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34265 check_erase2_testset(mt, set38, ARRAY_SIZE(set38)); in check_erase2_sets()
34267 MT_BUG_ON(mt, 0 != mtree_load(mt, 94637033459712)); in check_erase2_sets()
34268 mt_validate(mt); in check_erase2_sets()
34269 mtree_destroy(mt); in check_erase2_sets()
34272 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34273 check_erase2_testset(mt, set39, ARRAY_SIZE(set39)); in check_erase2_sets()
34275 mt_validate(mt); in check_erase2_sets()
34276 mtree_destroy(mt); in check_erase2_sets()
34279 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34280 check_erase2_testset(mt, set40, ARRAY_SIZE(set40)); in check_erase2_sets()
34282 mt_validate(mt); in check_erase2_sets()
34283 mtree_destroy(mt); in check_erase2_sets()
34286 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34287 check_erase2_testset(mt, set41, ARRAY_SIZE(set41)); in check_erase2_sets()
34289 mt_validate(mt); in check_erase2_sets()
34290 mtree_destroy(mt); in check_erase2_sets()
34295 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34296 check_erase2_testset(mt, set42, ARRAY_SIZE(set42)); in check_erase2_sets()
34299 MT_BUG_ON(mt, mas.last != 4041211903); in check_erase2_sets()
34301 mt_validate(mt); in check_erase2_sets()
34302 mtree_destroy(mt); in check_erase2_sets()
34307 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34308 check_erase2_testset(mt, set43, ARRAY_SIZE(set43)); in check_erase2_sets()
34311 mt_validate(mt); in check_erase2_sets()
34312 mtree_destroy(mt); in check_erase2_sets()
34351 MA_STATE(mas, test->mt, 0, 0); in rcu_reader_fwd()
34461 MA_STATE(mas, test->mt, 0, 0); in rcu_reader_rev()
34523 mt_dump(test->mt, mt_dump_dec); in rcu_reader_rev()
34586 static void rcu_stress_rev(struct maple_tree *mt, struct rcu_test_struct2 *test, in rcu_stress_rev() argument
34606 mtree_store_range(mt, start, end, in rcu_stress_rev()
34616 mtree_store_range(mt, start, end, in rcu_stress_rev()
34627 mtree_store_range(mt, start, end, NULL, GFP_KERNEL); in rcu_stress_rev()
34634 mtree_store_range(mt, start, end, in rcu_stress_rev()
34645 static void rcu_stress_fwd(struct maple_tree *mt, struct rcu_test_struct2 *test, in rcu_stress_fwd() argument
34663 mtree_store_range(mt, start, end, in rcu_stress_fwd()
34673 mtree_store_range(mt, start, end, in rcu_stress_fwd()
34684 mtree_store_range(mt, start, end, NULL, GFP_KERNEL); in rcu_stress_fwd()
34691 mtree_store_range(mt, start, end, in rcu_stress_fwd()
34713 static void rcu_stress(struct maple_tree *mt, bool forward) in rcu_stress() argument
34723 test.mt = mt; in rcu_stress()
34735 mtree_store_range(mt, seed, r, in rcu_stress()
34767 mtree_store_range(mt, test.index[add], test.last[add], in rcu_stress()
34771 mt_set_in_rcu(mt); in rcu_stress()
34777 rcu_stress_fwd(mt, &test, count, test_reader); in rcu_stress()
34779 rcu_stress_rev(mt, &test, count, test_reader); in rcu_stress()
34785 mt_validate(mt); in rcu_stress()
34790 struct maple_tree *mt; /* the maple tree */ member
34869 entry = mtree_load(test->mt, test->index); in rcu_val()
34870 MT_BUG_ON(test->mt, eval_rcu_entry(test, entry, &update_2, in rcu_val()
34890 MA_STATE(mas, test->mt, test->range_start, test->range_start); in rcu_loop()
34911 MT_BUG_ON(test->mt, entry != expected); in rcu_loop()
34919 MT_BUG_ON(test->mt, eval_rcu_entry(test, entry, in rcu_loop()
34932 void run_check_rcu(struct maple_tree *mt, struct rcu_test_struct *vals) in run_check_rcu() argument
34939 mt_set_in_rcu(mt); in run_check_rcu()
34940 MT_BUG_ON(mt, !mt_in_rcu(mt)); in run_check_rcu()
34955 mtree_store_range(mt, vals->index, vals->last, vals->entry2, in run_check_rcu()
34961 MT_BUG_ON(mt, !vals->seen_entry2); in run_check_rcu()
34967 MA_STATE(mas, test->mt, test->index, test->index); in rcu_slot_store_reader()
34984 static noinline void run_check_rcu_slot_store(struct maple_tree *mt) in run_check_rcu_slot_store() argument
34989 struct rcu_test_struct3 test = {.stop = false, .mt = mt}; in run_check_rcu_slot_store()
34997 mtree_store_range(mt, i * len, i * len + len - 1, in run_check_rcu_slot_store()
35001 mt_set_in_rcu(mt); in run_check_rcu_slot_store()
35002 MT_BUG_ON(mt, !mt_in_rcu(mt)); in run_check_rcu_slot_store()
35016 mtree_store_range(mt, --start, ++end, xa_mk_value(100), in run_check_rcu_slot_store()
35025 mt_validate(mt); in run_check_rcu_slot_store()
35029 void run_check_rcu_slowread(struct maple_tree *mt, struct rcu_test_struct *vals) in run_check_rcu_slowread() argument
35037 mt_set_in_rcu(mt); in run_check_rcu_slowread()
35038 MT_BUG_ON(mt, !mt_in_rcu(mt)); in run_check_rcu_slowread()
35055 mtree_store(mt, index, in run_check_rcu_slowread()
35066 MT_BUG_ON(mt, !vals->seen_entry2); in run_check_rcu_slowread()
35067 MT_BUG_ON(mt, !vals->seen_entry3); in run_check_rcu_slowread()
35068 MT_BUG_ON(mt, !vals->seen_both); in run_check_rcu_slowread()
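The run_check_rcu*() tests above flip the tree into RCU mode with mt_set_in_rcu() and read while a writer stores concurrently. A minimal sketch of the reader side; holding rcu_read_lock() across the lookup and the use of the entry is what keeps the entry from being freed underneath the reader:

static void rcu_reader_sketch(struct maple_tree *tree, unsigned long index)
{
        void *entry;

        rcu_read_lock();
        entry = mtree_load(tree, index);        /* lockless lookup against concurrent writers */
        if (entry)
                pr_debug("index %lu is occupied\n", index);
        rcu_read_unlock();
}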
35070 static noinline void __init check_rcu_simulated(struct maple_tree *mt) in check_rcu_simulated() argument
35076 MA_STATE(mas_writer, mt, 0, 0); in check_rcu_simulated()
35077 MA_STATE(mas_reader, mt, target, target); in check_rcu_simulated()
35081 mt_set_in_rcu(mt); in check_rcu_simulated()
35093 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35097 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35110 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35114 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35127 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35131 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35144 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35148 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35160 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35164 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35176 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35180 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35201 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35206 MT_BUG_ON(mt, mas_next(&mas_reader, ULONG_MAX) != xa_mk_value(val)); in check_rcu_simulated()
35220 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35225 MT_BUG_ON(mt, mas_prev(&mas_reader, 0) != xa_mk_value(val)); in check_rcu_simulated()
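check_rcu_simulated() pins a reader state with MA_STATE(), lets the writer move or resize the target entry, and re-walks. A minimal sketch of that reader shape ('target' is illustrative):

static void *rcu_walk_sketch(struct maple_tree *tree, unsigned long target)
{
        MA_STATE(mas, tree, target, target);
        void *entry;

        rcu_read_lock();
        entry = mas_walk(&mas);         /* the entry covering 'target', or NULL */
        rcu_read_unlock();
        return entry;
}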
35231 static noinline void __init check_rcu_threaded(struct maple_tree *mt) in check_rcu_threaded() argument
35241 mtree_store_range(mt, i*10, i*10 + 5, in check_rcu_threaded()
35245 vals.mt = mt; in check_rcu_threaded()
35256 run_check_rcu(mt, &vals); in check_rcu_threaded()
35257 mtree_destroy(mt); in check_rcu_threaded()
35259 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35261 mtree_store_range(mt, i*10, i*10 + 5, in check_rcu_threaded()
35268 vals.mt = mt; in check_rcu_threaded()
35277 run_check_rcu(mt, &vals); in check_rcu_threaded()
35278 mtree_destroy(mt); in check_rcu_threaded()
35281 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35282 run_check_rcu_slot_store(mt); in check_rcu_threaded()
35283 mtree_destroy(mt); in check_rcu_threaded()
35286 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35287 rcu_stress(mt, true); in check_rcu_threaded()
35288 mtree_destroy(mt); in check_rcu_threaded()
35291 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35292 rcu_stress(mt, false); in check_rcu_threaded()
35293 mtree_destroy(mt); in check_rcu_threaded()
35296 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35298 mtree_store_range(mt, i*10, i*10 + 5, in check_rcu_threaded()
35305 vals.mt = mt; in check_rcu_threaded()
35326 run_check_rcu_slowread(mt, &vals); in check_rcu_threaded()
35380 static void check_dfs_preorder(struct maple_tree *mt) in check_dfs_preorder() argument
35384 MA_STATE(mas, mt, 0, 0); in check_dfs_preorder()
35391 check_seq(mt, max, false); in check_dfs_preorder()
35396 MT_BUG_ON(mt, count != e); in check_dfs_preorder()
35397 mtree_destroy(mt); in check_dfs_preorder()
35399 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_dfs_preorder()
35405 check_seq(mt, max, false); in check_dfs_preorder()
35411 MT_BUG_ON(mt, count != e); in check_dfs_preorder()
35412 mtree_destroy(mt); in check_dfs_preorder()
35414 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_dfs_preorder()
35417 check_rev_seq(mt, max, false); in check_dfs_preorder()
35423 MT_BUG_ON(mt, count != e); in check_dfs_preorder()
35424 mtree_destroy(mt); in check_dfs_preorder()
35426 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_dfs_preorder()
35434 MT_BUG_ON(mt, mas_is_err(&mas)); in check_dfs_preorder()
35448 static noinline void __init check_prealloc(struct maple_tree *mt) in check_prealloc() argument
35455 MA_STATE(mas, mt, 10, 20); in check_prealloc()
35459 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_prealloc()
35463 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35466 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35467 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35470 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35472 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35475 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35476 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35477 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35480 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35483 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35486 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35488 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1); in check_prealloc()
35491 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35494 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35496 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35499 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35501 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1); in check_prealloc()
35502 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35505 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35509 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35512 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35514 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1); in check_prealloc()
35516 MT_BUG_ON(mt, mas_allocated(&mas) != allocated); in check_prealloc()
35517 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35520 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35522 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35525 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35527 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35531 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35533 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35535 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35538 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35541 MT_BUG_ON(mt, allocated != 1); in check_prealloc()
35543 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35547 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35550 MT_BUG_ON(mt, allocated != 1 + height * 2); in check_prealloc()
35552 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35556 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) == 0); in check_prealloc()
35559 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35564 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35567 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35568 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35570 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35573 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) == 0); in check_prealloc()
35576 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
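check_prealloc() verifies that mas_preallocate() reserves enough nodes for the pending write and that mas_destroy() gives unused reservations back. A minimal sketch of the usual preallocate-then-commit pattern, assuming the caller already holds the lock protecting the tree; ptr and the 10-20 range are illustrative:

static int prealloc_store_sketch(struct maple_tree *tree, void *ptr)
{
        MA_STATE(mas, tree, 10, 20);
        int ret;

        ret = mas_preallocate(&mas, ptr, GFP_KERNEL);   /* reserve worst-case nodes up front */
        if (ret)
                return ret;

        /* mas_destroy(&mas) here would drop the reservation if the write were abandoned */
        mas_store_prealloc(&mas, ptr);  /* commits without allocating and frees leftover nodes */
        return 0;
}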
35581 static noinline void __init check_spanning_write(struct maple_tree *mt) in check_spanning_write() argument
35584 MA_STATE(mas, mt, 1200, 2380); in check_spanning_write()
35587 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35589 mtree_lock(mt); in check_spanning_write()
35592 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35593 mtree_unlock(mt); in check_spanning_write()
35594 mtree_destroy(mt); in check_spanning_write()
35597 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35599 mtree_lock(mt); in check_spanning_write()
35603 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35604 mtree_unlock(mt); in check_spanning_write()
35605 mt_validate(mt); in check_spanning_write()
35606 mtree_destroy(mt); in check_spanning_write()
35609 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35611 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35614 mtree_lock(mt); in check_spanning_write()
35617 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35618 mtree_unlock(mt); in check_spanning_write()
35619 mtree_destroy(mt); in check_spanning_write()
35622 mt_init_flags(mt, 0); in check_spanning_write()
35624 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35627 mtree_lock(mt); in check_spanning_write()
35630 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35631 mtree_unlock(mt); in check_spanning_write()
35632 mtree_destroy(mt); in check_spanning_write()
35635 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35637 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35640 mtree_lock(mt); in check_spanning_write()
35643 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35644 mtree_unlock(mt); in check_spanning_write()
35645 mtree_destroy(mt); in check_spanning_write()
35648 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35650 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35653 mtree_lock(mt); in check_spanning_write()
35656 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35657 mtree_unlock(mt); in check_spanning_write()
35658 mtree_destroy(mt); in check_spanning_write()
35661 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35663 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35666 mtree_lock(mt); in check_spanning_write()
35669 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35670 mtree_unlock(mt); in check_spanning_write()
35671 mtree_destroy(mt); in check_spanning_write()
35677 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35679 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35682 mtree_lock(mt); in check_spanning_write()
35685 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35686 mtree_unlock(mt); in check_spanning_write()
35687 mtree_destroy(mt); in check_spanning_write()
35693 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35695 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35697 mtree_lock(mt); in check_spanning_write()
35700 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35701 mtree_unlock(mt); in check_spanning_write()
35702 mtree_destroy(mt); in check_spanning_write()
35705 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35707 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35708 mtree_lock(mt); in check_spanning_write()
35719 MT_BUG_ON(mt, (mas_data_end(&mas)) != mt_slot_count(mas.node) - 1); in check_spanning_write()
35722 mtree_unlock(mt); in check_spanning_write()
35723 mtree_destroy(mt); in check_spanning_write()
35726 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35728 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35730 mtree_lock(mt); in check_spanning_write()
35733 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35734 mtree_unlock(mt); in check_spanning_write()
35735 mtree_destroy(mt); in check_spanning_write()
35737 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35739 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35741 mtree_lock(mt); in check_spanning_write()
35744 mtree_unlock(mt); in check_spanning_write()
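check_spanning_write() fills the tree with small ranges and then issues one store that crosses node boundaries, confirming with mas_walk() that the span reads back correctly. A minimal sketch of that shape; the 1200-2380 span matches the MA_STATE above, the rest is illustrative:

static void spanning_store_sketch(void)
{
        struct maple_tree tree;
        unsigned long i;
        MA_STATE(mas, &tree, 1200, 2380);

        mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
        for (i = 0; i <= 500; i++)
                mtree_store_range(&tree, i * 10, i * 10 + 5, xa_mk_value(i), GFP_KERNEL);

        mtree_lock(&tree);
        mas_store_gfp(&mas, NULL, GFP_KERNEL);  /* one store clearing 1200-2380 across nodes */
        mas_set(&mas, 1205);
        WARN_ON(mas_walk(&mas) != NULL);        /* the cleared span reads back empty */
        mtree_unlock(&tree);
        mtree_destroy(&tree);
}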
35749 static noinline void __init check_null_expand(struct maple_tree *mt) in check_null_expand() argument
35753 MA_STATE(mas, mt, 959, 959); in check_null_expand()
35756 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_null_expand()
35763 MT_BUG_ON(mt, mtree_load(mt, 963) != NULL); in check_null_expand()
35764 MT_BUG_ON(mt, data_end != mas_data_end(&mas)); in check_null_expand()
35772 MT_BUG_ON(mt, mtree_load(mt, 884) != NULL); in check_null_expand()
35773 MT_BUG_ON(mt, mtree_load(mt, 889) != NULL); in check_null_expand()
35775 MT_BUG_ON(mt, data_end != mas_data_end(&mas)); in check_null_expand()
35784 MT_BUG_ON(mt, mtree_load(mt, 899) != NULL); in check_null_expand()
35785 MT_BUG_ON(mt, mtree_load(mt, 900) != NULL); in check_null_expand()
35786 MT_BUG_ON(mt, mtree_load(mt, 905) != NULL); in check_null_expand()
35787 MT_BUG_ON(mt, mtree_load(mt, 906) != NULL); in check_null_expand()
35789 MT_BUG_ON(mt, data_end - 2 != mas_data_end(&mas)); in check_null_expand()
35798 MT_BUG_ON(mt, mtree_load(mt, 809) != NULL); in check_null_expand()
35799 MT_BUG_ON(mt, mtree_load(mt, 810) != NULL); in check_null_expand()
35800 MT_BUG_ON(mt, mtree_load(mt, 825) != NULL); in check_null_expand()
35801 MT_BUG_ON(mt, mtree_load(mt, 826) != NULL); in check_null_expand()
35803 MT_BUG_ON(mt, data_end - 4 != mas_data_end(&mas)); in check_null_expand()
35810 static noinline void __init check_nomem(struct maple_tree *mt) in check_nomem() argument
35812 MA_STATE(ms, mt, 1, 1); in check_nomem()
35814 MT_BUG_ON(mt, !mtree_empty(mt)); in check_nomem()
35819 MT_BUG_ON(mt, mtree_insert(mt, 1, &ms, GFP_ATOMIC) != -ENOMEM); in check_nomem()
35821 MT_BUG_ON(mt, mtree_insert(mt, 0, &ms, GFP_ATOMIC) != 0); in check_nomem()
35831 mtree_lock(mt); in check_nomem()
35833 MT_BUG_ON(mt, ms.node != MA_ERROR(-ENOMEM)); in check_nomem()
35835 MT_BUG_ON(mt, ms.node != MAS_START); in check_nomem()
35836 mtree_unlock(mt); in check_nomem()
35837 MT_BUG_ON(mt, mtree_insert(mt, 2, mt, GFP_KERNEL) != 0); in check_nomem()
35838 mtree_lock(mt); in check_nomem()
35841 mtree_unlock(mt); in check_nomem()
35842 mtree_destroy(mt); in check_nomem()
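check_nomem() injects allocation failures and checks the resulting error paths of mtree_insert() and the mas_* state. A minimal sketch of the caller-side handling (the index and the fallback policy are illustrative; the failure injection itself is test-only):

static int insert_with_fallback_sketch(struct maple_tree *tree, void *entry)
{
        int ret;

        ret = mtree_insert(tree, 1, entry, GFP_ATOMIC);
        if (ret == -ENOMEM)             /* atomic attempt failed; retry where sleeping is allowed */
                ret = mtree_insert(tree, 1, entry, GFP_KERNEL);
        if (ret == -EEXIST)             /* insert, unlike store, refuses to overwrite */
                pr_debug("index 1 already has an entry\n");
        return ret;
}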
35845 static noinline void __init check_locky(struct maple_tree *mt) in check_locky() argument
35847 MA_STATE(ms, mt, 2, 2); in check_locky()
35848 MA_STATE(reader, mt, 2, 2); in check_locky()
35851 mt_set_in_rcu(mt); in check_locky()
35857 mt_clear_in_rcu(mt); in check_locky()