Lines Matching full:mt
28 #define RCU_MT_BUG_ON(test, y) {if (y) { test->stop = true; } MT_BUG_ON(test->mt, y); }
31 struct maple_tree *mt; member
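For orientation, a hedged sketch of how a reader thread is expected to use the RCU_MT_BUG_ON() wrapper above: on a mismatch it first sets test->stop so the sibling threads wind down, then reports through MT_BUG_ON() against test->mt. The ->index and ->expected fields below are illustrative names only, not necessarily the struct's real members:

	void *entry;

	rcu_read_lock();
	entry = mtree_load(test->mt, test->index);     /* lockless lookup under RCU          */
	RCU_MT_BUG_ON(test, entry != test->expected);  /* flag siblings to stop, then report */
	rcu_read_unlock();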
61 static noinline void check_new_node(struct maple_tree *mt) in check_new_node() argument
69 MA_STATE(mas, mt, 0, 0); in check_new_node()
72 mtree_lock(mt); in check_new_node()
77 MT_BUG_ON(mt, mas_alloc_req(&mas) != 3); in check_new_node()
79 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM)); in check_new_node()
80 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
82 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
84 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
85 MT_BUG_ON(mt, mn == NULL); in check_new_node()
86 MT_BUG_ON(mt, mas.alloc == NULL); in check_new_node()
87 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL); in check_new_node()
90 mtree_unlock(mt); in check_new_node()
94 mtree_lock(mt); in check_new_node()
98 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1); in check_new_node()
101 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
104 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
105 MT_BUG_ON(mt, mn == NULL); in check_new_node()
106 MT_BUG_ON(mt, mn->slot[0] != NULL); in check_new_node()
107 MT_BUG_ON(mt, mn->slot[1] != NULL); in check_new_node()
108 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
118 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
120 MT_BUG_ON(mt, mas_alloc_req(&mas) != 0); in check_new_node()
122 MT_BUG_ON(mt, mas.alloc == NULL); in check_new_node()
123 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL); in check_new_node()
124 MT_BUG_ON(mt, mas.alloc->slot[1] == NULL); in check_new_node()
126 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
132 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1); in check_new_node()
135 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
136 MT_BUG_ON(mt, mas_allocated(&mas) != 1); in check_new_node()
139 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
140 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
141 MT_BUG_ON(mt, mn == NULL); in check_new_node()
142 MT_BUG_ON(mt, mn->slot[0] != NULL); in check_new_node()
143 MT_BUG_ON(mt, mn->slot[1] != NULL); in check_new_node()
144 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
146 MT_BUG_ON(mt, mas_allocated(&mas) != 1); in check_new_node()
147 MT_BUG_ON(mt, mas.alloc->node_count); in check_new_node()
150 MT_BUG_ON(mt, mas_alloc_req(&mas) != 2); in check_new_node()
152 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
153 MT_BUG_ON(mt, mas_allocated(&mas) != 3); in check_new_node()
154 MT_BUG_ON(mt, mas.alloc == NULL); in check_new_node()
155 MT_BUG_ON(mt, mas.alloc->slot[0] == NULL); in check_new_node()
156 MT_BUG_ON(mt, mas.alloc->slot[1] == NULL); in check_new_node()
159 MT_BUG_ON(mt, mas_allocated(&mas) != i); in check_new_node()
160 MT_BUG_ON(mt, !mn); in check_new_node()
161 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
167 MT_BUG_ON(mt, mas_alloc_req(&mas) != total); in check_new_node()
169 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
189 MT_BUG_ON(mt, mas.alloc->node_count != e); in check_new_node()
191 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
192 MT_BUG_ON(mt, mas_allocated(&mas) != i - 1); in check_new_node()
193 MT_BUG_ON(mt, !mn); in check_new_node()
201 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
204 MT_BUG_ON(mt, mas_allocated(&mas) != j - 1); in check_new_node()
205 MT_BUG_ON(mt, !mn); in check_new_node()
206 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
208 MT_BUG_ON(mt, mas_allocated(&mas) != j); in check_new_node()
210 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
211 MT_BUG_ON(mt, mas_allocated(&mas) != j - 1); in check_new_node()
214 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
218 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
220 MT_BUG_ON(mt, mas_allocated(&mas) != i - j); in check_new_node()
222 MT_BUG_ON(mt, mas_allocated(&mas) != i - j - 1); in check_new_node()
228 MT_BUG_ON(mt, mas_allocated(&mas) != i - j); in check_new_node()
230 MT_BUG_ON(mt, mas_allocated(&mas) != i); in check_new_node()
232 MT_BUG_ON(mt, mas_allocated(&mas) != i - j); in check_new_node()
234 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
236 MT_BUG_ON(mt, mas_allocated(&mas) != i - j - 1); in check_new_node()
238 MT_BUG_ON(mt, mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
247 MT_BUG_ON(mt, !mas.alloc); in check_new_node()
253 MT_BUG_ON(mt, !smn->slot[j]); in check_new_node()
259 MT_BUG_ON(mt, mas_allocated(&mas) != total); in check_new_node()
262 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
266 MT_BUG_ON(mt, mas_allocated(&mas) != i); /* check request filled */ in check_new_node()
269 MT_BUG_ON(mt, mn == NULL); in check_new_node()
270 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
273 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
277 MA_STATE(mas2, mt, 0, 0); in check_new_node()
280 MT_BUG_ON(mt, mas_allocated(&mas) != i); /* check request filled */ in check_new_node()
283 MT_BUG_ON(mt, mn == NULL); in check_new_node()
284 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
286 MT_BUG_ON(mt, mas_allocated(&mas2) != j); in check_new_node()
288 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
289 MT_BUG_ON(mt, mas_allocated(&mas2) != i); in check_new_node()
292 MT_BUG_ON(mt, mas_allocated(&mas2) != j); in check_new_node()
294 MT_BUG_ON(mt, mn == NULL); in check_new_node()
295 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
298 MT_BUG_ON(mt, mas_allocated(&mas2) != 0); in check_new_node()
302 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
304 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM)); in check_new_node()
305 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
306 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1); in check_new_node()
307 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS - 1); in check_new_node()
310 MT_BUG_ON(mt, mn == NULL); in check_new_node()
311 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
312 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS); in check_new_node()
313 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS - 2); in check_new_node()
316 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1); in check_new_node()
317 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS - 1); in check_new_node()
321 MT_BUG_ON(mt, mas_alloc_req(&mas) != 1); in check_new_node()
322 MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM)); in check_new_node()
323 MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL)); in check_new_node()
324 MT_BUG_ON(mt, mas_alloc_req(&mas)); in check_new_node()
325 MT_BUG_ON(mt, mas.alloc->node_count); in check_new_node()
326 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 2); in check_new_node()
328 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
329 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 1); in check_new_node()
330 MT_BUG_ON(mt, mas.alloc->node_count != MAPLE_ALLOC_SLOTS - 1); in check_new_node()
332 MT_BUG_ON(mt, mas.alloc->node_count); in check_new_node()
333 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS + 2); in check_new_node()
335 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
339 MT_BUG_ON(mt, not_empty(mn)); in check_new_node()
342 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_new_node()
395 MT_BUG_ON(mt, mas_allocated(&mas) != 5); in check_new_node()
400 MT_BUG_ON(mt, mas_allocated(&mas) != 10); in check_new_node()
406 MT_BUG_ON(mt, mas_allocated(&mas) != MAPLE_ALLOC_SLOTS - 1); in check_new_node()
411 MT_BUG_ON(mt, mas_allocated(&mas) != 10 + MAPLE_ALLOC_SLOTS - 1); in check_new_node()
414 mtree_unlock(mt); in check_new_node()
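Stripped of the exhaustive push/pop counting, the allocation state machine that check_new_node() keeps asserting looks roughly like this. A minimal sketch: mas_node_count() is an internal helper visible to this test build, the count of 3 is one of the cases above, and the MA_ERROR(-ENOMEM) step assumes, as the assertions above imply, that the first non-sleeping attempt is made to fail so mas_nomem() has work to do:

	MA_STATE(mas, mt, 0, 0);

	mtree_lock(mt);
	mas_node_count(&mas, 3);                       /* request three nodes               */
	MT_BUG_ON(mt, mas.node != MA_ERROR(-ENOMEM));  /* request recorded, nothing cached  */
	MT_BUG_ON(mt, !mas_nomem(&mas, GFP_KERNEL));   /* sleepable retry fills the request */
	MT_BUG_ON(mt, mas_allocated(&mas) != 3);       /* three nodes now cached in the mas */
	mas_destroy(&mas);                             /* hand unused nodes back            */
	mtree_unlock(mt);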
420 static noinline void check_erase(struct maple_tree *mt, unsigned long index, in check_erase() argument
423 MT_BUG_ON(mt, mtree_test_erase(mt, index) != ptr); in check_erase()
426 #define erase_check_load(mt, i) check_load(mt, set[i], entry[i%2]) argument
427 #define erase_check_insert(mt, i) check_insert(mt, set[i], entry[i%2]) argument
428 #define erase_check_erase(mt, i) check_erase(mt, set[i], entry[i%2]) argument
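The three wrappers above just index the test's set[]/entry[] arrays, alternating between the two stored values (entry[0] and entry[1] from the array at line 448). Typical use in the test body, looping over the whole array here only for illustration:

	unsigned long i;

	for (i = 0; i < ARRAY_SIZE(set); i++)
		erase_check_insert(mt, i);    /* store entry[i % 2] at set[i]        */
	for (i = 0; i < ARRAY_SIZE(set); i++)
		erase_check_load(mt, i);      /* each index must return entry[i % 2] */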
430 static noinline void check_erase_testset(struct maple_tree *mt) in check_erase_testset() argument
448 void *entry[2] = { ptr, mt }; in check_erase_testset()
453 mt_set_in_rcu(mt); in check_erase_testset()
455 erase_check_insert(mt, i); in check_erase_testset()
457 erase_check_load(mt, i); in check_erase_testset()
460 erase_check_erase(mt, 1); in check_erase_testset()
461 erase_check_load(mt, 0); in check_erase_testset()
462 check_load(mt, set[1], NULL); in check_erase_testset()
464 erase_check_load(mt, i); in check_erase_testset()
467 erase_check_erase(mt, 2); in check_erase_testset()
468 erase_check_load(mt, 0); in check_erase_testset()
469 check_load(mt, set[1], NULL); in check_erase_testset()
470 check_load(mt, set[2], NULL); in check_erase_testset()
472 erase_check_insert(mt, 1); in check_erase_testset()
473 erase_check_insert(mt, 2); in check_erase_testset()
476 erase_check_load(mt, i); in check_erase_testset()
479 erase_check_load(mt, 3); in check_erase_testset()
480 erase_check_erase(mt, 1); in check_erase_testset()
481 erase_check_load(mt, 0); in check_erase_testset()
482 check_load(mt, set[1], NULL); in check_erase_testset()
484 erase_check_load(mt, i); in check_erase_testset()
490 root_node = mt->ma_root; in check_erase_testset()
491 erase_check_insert(mt, 1); in check_erase_testset()
493 erase_check_load(mt, 0); in check_erase_testset()
494 check_load(mt, 5016, NULL); in check_erase_testset()
495 erase_check_load(mt, 1); in check_erase_testset()
496 check_load(mt, 5013, NULL); in check_erase_testset()
497 erase_check_load(mt, 2); in check_erase_testset()
498 check_load(mt, 5018, NULL); in check_erase_testset()
499 erase_check_load(mt, 3); in check_erase_testset()
501 erase_check_erase(mt, 2); /* erase 5017 to check append */ in check_erase_testset()
502 erase_check_load(mt, 0); in check_erase_testset()
503 check_load(mt, 5016, NULL); in check_erase_testset()
504 erase_check_load(mt, 1); in check_erase_testset()
505 check_load(mt, 5013, NULL); in check_erase_testset()
506 check_load(mt, set[2], NULL); in check_erase_testset()
507 check_load(mt, 5018, NULL); in check_erase_testset()
509 erase_check_load(mt, 3); in check_erase_testset()
511 root_node = mt->ma_root; in check_erase_testset()
512 erase_check_insert(mt, 2); in check_erase_testset()
514 erase_check_load(mt, 0); in check_erase_testset()
515 check_load(mt, 5016, NULL); in check_erase_testset()
516 erase_check_load(mt, 1); in check_erase_testset()
517 check_load(mt, 5013, NULL); in check_erase_testset()
518 erase_check_load(mt, 2); in check_erase_testset()
519 check_load(mt, 5018, NULL); in check_erase_testset()
520 erase_check_load(mt, 3); in check_erase_testset()
523 erase_check_erase(mt, 2); /* erase 5017 to check append */ in check_erase_testset()
524 erase_check_load(mt, 0); in check_erase_testset()
525 check_load(mt, 5016, NULL); in check_erase_testset()
526 check_load(mt, set[2], NULL); in check_erase_testset()
527 erase_check_erase(mt, 0); /* erase 5015 to check append */ in check_erase_testset()
528 check_load(mt, set[0], NULL); in check_erase_testset()
529 check_load(mt, 5016, NULL); in check_erase_testset()
530 erase_check_insert(mt, 4); /* 1000 < Should not split. */ in check_erase_testset()
531 check_load(mt, set[0], NULL); in check_erase_testset()
532 check_load(mt, 5016, NULL); in check_erase_testset()
533 erase_check_load(mt, 1); in check_erase_testset()
534 check_load(mt, 5013, NULL); in check_erase_testset()
535 check_load(mt, set[2], NULL); in check_erase_testset()
536 check_load(mt, 5018, NULL); in check_erase_testset()
537 erase_check_load(mt, 4); in check_erase_testset()
538 check_load(mt, 999, NULL); in check_erase_testset()
539 check_load(mt, 1001, NULL); in check_erase_testset()
540 erase_check_load(mt, 4); in check_erase_testset()
541 if (mt_in_rcu(mt)) in check_erase_testset()
542 MT_BUG_ON(mt, root_node == mt->ma_root); in check_erase_testset()
544 MT_BUG_ON(mt, root_node != mt->ma_root); in check_erase_testset()
547 MT_BUG_ON(mt, !mte_is_leaf(mt->ma_root)); in check_erase_testset()
551 erase_check_insert(mt, 0); in check_erase_testset()
552 erase_check_insert(mt, 2); in check_erase_testset()
555 erase_check_insert(mt, i); in check_erase_testset()
557 erase_check_load(mt, j); in check_erase_testset()
560 erase_check_erase(mt, 14); /*6015 */ in check_erase_testset()
563 check_load(mt, set[i], NULL); in check_erase_testset()
565 erase_check_load(mt, i); in check_erase_testset()
567 erase_check_erase(mt, 16); /*7002 */ in check_erase_testset()
570 check_load(mt, set[i], NULL); in check_erase_testset()
572 erase_check_load(mt, i); in check_erase_testset()
577 erase_check_erase(mt, 13); /*6012 */ in check_erase_testset()
580 check_load(mt, set[i], NULL); in check_erase_testset()
582 erase_check_load(mt, i); in check_erase_testset()
585 erase_check_erase(mt, 15); /*7003 */ in check_erase_testset()
588 check_load(mt, set[i], NULL); in check_erase_testset()
590 erase_check_load(mt, i); in check_erase_testset()
594 erase_check_erase(mt, 17); /*7008 *should* cause coalesce. */ in check_erase_testset()
597 check_load(mt, set[i], NULL); in check_erase_testset()
599 erase_check_load(mt, i); in check_erase_testset()
602 erase_check_erase(mt, 18); /*7012 */ in check_erase_testset()
605 check_load(mt, set[i], NULL); in check_erase_testset()
607 erase_check_load(mt, i); in check_erase_testset()
611 erase_check_erase(mt, 19); /*7015 */ in check_erase_testset()
614 check_load(mt, set[i], NULL); in check_erase_testset()
616 erase_check_load(mt, i); in check_erase_testset()
619 erase_check_erase(mt, 20); /*8003 */ in check_erase_testset()
622 check_load(mt, set[i], NULL); in check_erase_testset()
624 erase_check_load(mt, i); in check_erase_testset()
627 erase_check_erase(mt, 21); /*8002 */ in check_erase_testset()
630 check_load(mt, set[i], NULL); in check_erase_testset()
632 erase_check_load(mt, i); in check_erase_testset()
636 erase_check_erase(mt, 22); /*8008 */ in check_erase_testset()
639 check_load(mt, set[i], NULL); in check_erase_testset()
641 erase_check_load(mt, i); in check_erase_testset()
644 erase_check_erase(mt, i); in check_erase_testset()
648 check_load(mt, set[i], NULL); in check_erase_testset()
650 erase_check_load(mt, i); in check_erase_testset()
656 erase_check_insert(mt, i); in check_erase_testset()
660 erase_check_erase(mt, i); in check_erase_testset()
663 erase_check_load(mt, j); in check_erase_testset()
665 check_load(mt, set[j], NULL); in check_erase_testset()
670 erase_check_erase(mt, i); in check_erase_testset()
673 erase_check_load(mt, j); in check_erase_testset()
675 check_load(mt, set[j], NULL); in check_erase_testset()
678 erase_check_insert(mt, 8); in check_erase_testset()
679 erase_check_insert(mt, 9); in check_erase_testset()
680 erase_check_erase(mt, 8); in check_erase_testset()
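For reference, the public-API core of the round trips above, reduced to a single index on a scratch tree. A hedged sketch: the index 5015 is taken from the comments in the listing, and single_index_round_trip() is a hypothetical helper, not part of the test file:

	static DEFINE_MTREE(scratch);

	static void single_index_round_trip(void)
	{
		void *v = xa_mk_value(5015);

		MT_BUG_ON(&scratch, mtree_insert(&scratch, 5015, v, GFP_KERNEL) != 0);
		MT_BUG_ON(&scratch, mtree_load(&scratch, 5015) != v);     /* stored index hits       */
		MT_BUG_ON(&scratch, mtree_load(&scratch, 5016) != NULL);  /* neighbour still empty   */
		MT_BUG_ON(&scratch, mtree_erase(&scratch, 5015) != v);    /* erase returns old entry */
		MT_BUG_ON(&scratch, mtree_load(&scratch, 5015) != NULL);
		mtree_destroy(&scratch);
	}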
687 #define erase_check_store_range(mt, a, i, ptr) mtree_test_store_range(mt, \ argument
934 static noinline void check_erase2_testset(struct maple_tree *mt, in check_erase2_testset() argument
943 MA_STATE(mas, mt, 0, 0); in check_erase2_testset()
950 MA_STATE(mas_start, mt, set[i+1], set[i+1]); in check_erase2_testset()
951 MA_STATE(mas_end, mt, set[i+2], set[i+2]); in check_erase2_testset()
978 erase_check_store_range(mt, set, i + 1, value); in check_erase2_testset()
1006 erase_check_store_range(mt, set, i + 1, value); in check_erase2_testset()
1011 check_erase(mt, set[i+1], xa_mk_value(set[i+1])); in check_erase2_testset()
1015 mt_validate(mt); in check_erase2_testset()
1017 MT_BUG_ON(mt, !mt_height(mt)); in check_erase2_testset()
1019 mt_dump(mt); in check_erase2_testset()
1027 mt_for_each(mt, foo, addr, ULONG_MAX) { in check_erase2_testset()
1030 pr_err("mt: %lu -> %p (%d)\n", addr+1, foo, check); in check_erase2_testset()
1040 MT_BUG_ON(mt, check != entry_count); in check_erase2_testset()
1053 MT_BUG_ON(mt, 1); in check_erase2_testset()
1068 mt_validate(mt); in check_erase2_testset()
1071 MT_BUG_ON(mt, check != entry_count); in check_erase2_testset()
1073 MT_BUG_ON(mt, mtree_load(mas.tree, 0) != NULL); in check_erase2_testset()
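The recount driven by the loop above is the standard mt_for_each() pattern; a trimmed sketch, where entry_count is the running total the test maintains while replaying a set:

	unsigned long addr = 0;
	void *foo;
	int check = 0;

	mt_for_each(mt, foo, addr, ULONG_MAX) {            /* visit every present entry    */
		if (xa_is_value(foo))
			check++;                           /* the sets store xarray values */
		else
			pr_err("mt: %lu -> %p (%d)\n",     /* anything else is unexpected  */
			       addr + 1, foo, check);
	}
	MT_BUG_ON(mt, check != entry_count);               /* nothing lost, nothing extra  */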
1079 static noinline void check_erase2_sets(struct maple_tree *mt) in check_erase2_sets() argument
33819 * Gap was found: mt 4041162752 gap_end 4041183232 in check_erase2_sets()
33851 MA_STATE(mas, mt, 0, 0); in check_erase2_sets()
33854 check_erase2_testset(mt, set, ARRAY_SIZE(set)); in check_erase2_sets()
33856 mtree_destroy(mt); in check_erase2_sets()
33858 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33859 check_erase2_testset(mt, set2, ARRAY_SIZE(set2)); in check_erase2_sets()
33861 MT_BUG_ON(mt, !!mt_find(mt, &start, 140735933906943UL)); in check_erase2_sets()
33862 mtree_destroy(mt); in check_erase2_sets()
33865 mt_init_flags(mt, 0); in check_erase2_sets()
33866 check_erase2_testset(mt, set3, ARRAY_SIZE(set3)); in check_erase2_sets()
33868 mtree_destroy(mt); in check_erase2_sets()
33870 mt_init_flags(mt, 0); in check_erase2_sets()
33871 check_erase2_testset(mt, set4, ARRAY_SIZE(set4)); in check_erase2_sets()
33879 mtree_destroy(mt); in check_erase2_sets()
33881 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33883 check_erase2_testset(mt, set5, ARRAY_SIZE(set5)); in check_erase2_sets()
33886 mtree_destroy(mt); in check_erase2_sets()
33888 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33889 check_erase2_testset(mt, set6, ARRAY_SIZE(set6)); in check_erase2_sets()
33891 mtree_destroy(mt); in check_erase2_sets()
33893 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33894 check_erase2_testset(mt, set7, ARRAY_SIZE(set7)); in check_erase2_sets()
33896 mtree_destroy(mt); in check_erase2_sets()
33898 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33899 check_erase2_testset(mt, set8, ARRAY_SIZE(set8)); in check_erase2_sets()
33901 mtree_destroy(mt); in check_erase2_sets()
33903 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33904 check_erase2_testset(mt, set9, ARRAY_SIZE(set9)); in check_erase2_sets()
33906 mtree_destroy(mt); in check_erase2_sets()
33908 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33909 check_erase2_testset(mt, set10, ARRAY_SIZE(set10)); in check_erase2_sets()
33911 mtree_destroy(mt); in check_erase2_sets()
33914 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33915 check_erase2_testset(mt, set11, ARRAY_SIZE(set11)); in check_erase2_sets()
33918 MT_BUG_ON(mt, mas.last != 140014592573439); in check_erase2_sets()
33919 mtree_destroy(mt); in check_erase2_sets()
33922 mas.tree = mt; in check_erase2_sets()
33925 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33926 check_erase2_testset(mt, set12, ARRAY_SIZE(set12)); in check_erase2_sets()
33934 mtree_destroy(mt); in check_erase2_sets()
33937 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33938 check_erase2_testset(mt, set13, ARRAY_SIZE(set13)); in check_erase2_sets()
33939 mtree_erase(mt, 140373516443648); in check_erase2_sets()
33943 mtree_destroy(mt); in check_erase2_sets()
33944 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33945 check_erase2_testset(mt, set14, ARRAY_SIZE(set14)); in check_erase2_sets()
33947 mtree_destroy(mt); in check_erase2_sets()
33949 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33950 check_erase2_testset(mt, set15, ARRAY_SIZE(set15)); in check_erase2_sets()
33952 mtree_destroy(mt); in check_erase2_sets()
33957 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33958 check_erase2_testset(mt, set16, ARRAY_SIZE(set16)); in check_erase2_sets()
33961 MT_BUG_ON(mt, mas.last != 139921865547775); in check_erase2_sets()
33963 mtree_destroy(mt); in check_erase2_sets()
33972 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33973 check_erase2_testset(mt, set17, ARRAY_SIZE(set17)); in check_erase2_sets()
33976 MT_BUG_ON(mt, mas.last != 139953197322239); in check_erase2_sets()
33977 /* MT_BUG_ON(mt, mas.index != 139953197318144); */ in check_erase2_sets()
33979 mtree_destroy(mt); in check_erase2_sets()
33988 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
33989 check_erase2_testset(mt, set18, ARRAY_SIZE(set18)); in check_erase2_sets()
33992 MT_BUG_ON(mt, mas.last != 140222968475647); in check_erase2_sets()
33993 /*MT_BUG_ON(mt, mas.index != 140222966259712); */ in check_erase2_sets()
33995 mtree_destroy(mt); in check_erase2_sets()
34006 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34007 check_erase2_testset(mt, set19, ARRAY_SIZE(set19)); in check_erase2_sets()
34011 MT_BUG_ON(mt, entry != xa_mk_value(140656779083776)); in check_erase2_sets()
34013 MT_BUG_ON(mt, entry != xa_mk_value(140656766251008)); in check_erase2_sets()
34015 mtree_destroy(mt); in check_erase2_sets()
34022 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34023 check_erase2_testset(mt, set20, ARRAY_SIZE(set20)); in check_erase2_sets()
34025 check_load(mt, 94849009414144, NULL); in check_erase2_sets()
34027 mtree_destroy(mt); in check_erase2_sets()
34030 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34031 check_erase2_testset(mt, set21, ARRAY_SIZE(set21)); in check_erase2_sets()
34033 mt_validate(mt); in check_erase2_sets()
34035 mtree_destroy(mt); in check_erase2_sets()
34038 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34039 check_erase2_testset(mt, set22, ARRAY_SIZE(set22)); in check_erase2_sets()
34041 mt_validate(mt); in check_erase2_sets()
34042 ptr = mtree_load(mt, 140551363362816); in check_erase2_sets()
34043 MT_BUG_ON(mt, ptr == mtree_load(mt, 140551363420159)); in check_erase2_sets()
34045 mtree_destroy(mt); in check_erase2_sets()
34048 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34049 check_erase2_testset(mt, set23, ARRAY_SIZE(set23)); in check_erase2_sets()
34052 mt_validate(mt); in check_erase2_sets()
34053 mtree_destroy(mt); in check_erase2_sets()
34057 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34058 check_erase2_testset(mt, set24, ARRAY_SIZE(set24)); in check_erase2_sets()
34061 mt_validate(mt); in check_erase2_sets()
34062 mtree_destroy(mt); in check_erase2_sets()
34065 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34066 check_erase2_testset(mt, set25, ARRAY_SIZE(set25)); in check_erase2_sets()
34069 mt_validate(mt); in check_erase2_sets()
34070 mtree_destroy(mt); in check_erase2_sets()
34075 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34076 check_erase2_testset(mt, set26, ARRAY_SIZE(set26)); in check_erase2_sets()
34079 MT_BUG_ON(mt, mas.last != 140109040959487); in check_erase2_sets()
34081 mt_validate(mt); in check_erase2_sets()
34082 mtree_destroy(mt); in check_erase2_sets()
34087 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34088 check_erase2_testset(mt, set27, ARRAY_SIZE(set27)); in check_erase2_sets()
34090 MT_BUG_ON(mt, 0 != mtree_load(mt, 140415537422336)); in check_erase2_sets()
34092 mt_validate(mt); in check_erase2_sets()
34093 mtree_destroy(mt); in check_erase2_sets()
34097 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34098 check_erase2_testset(mt, set28, ARRAY_SIZE(set28)); in check_erase2_sets()
34103 MT_BUG_ON(mt, mas.index != 139918401601536); in check_erase2_sets()
34105 mt_validate(mt); in check_erase2_sets()
34106 mtree_destroy(mt); in check_erase2_sets()
34113 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34114 check_erase2_testset(mt, set29, ARRAY_SIZE(set29)); in check_erase2_sets()
34117 mt_validate(mt); in check_erase2_sets()
34118 mtree_destroy(mt); in check_erase2_sets()
34126 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34127 check_erase2_testset(mt, set30, ARRAY_SIZE(set30)); in check_erase2_sets()
34130 mt_validate(mt); in check_erase2_sets()
34131 mtree_destroy(mt); in check_erase2_sets()
34139 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34140 check_erase2_testset(mt, set31, ARRAY_SIZE(set31)); in check_erase2_sets()
34143 mt_validate(mt); in check_erase2_sets()
34144 mtree_destroy(mt); in check_erase2_sets()
34148 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34149 check_erase2_testset(mt, set32, ARRAY_SIZE(set32)); in check_erase2_sets()
34152 mt_validate(mt); in check_erase2_sets()
34153 mtree_destroy(mt); in check_erase2_sets()
34157 * mt 140582827569152 gap_end 140582869532672 in check_erase2_sets()
34168 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34169 check_erase2_testset(mt, set33, ARRAY_SIZE(set33)); in check_erase2_sets()
34172 MT_BUG_ON(mt, mas.last != 140583003750399); in check_erase2_sets()
34174 mt_validate(mt); in check_erase2_sets()
34175 mtree_destroy(mt); in check_erase2_sets()
34183 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34184 check_erase2_testset(mt, set34, ARRAY_SIZE(set34)); in check_erase2_sets()
34187 mt_validate(mt); in check_erase2_sets()
34188 mtree_destroy(mt); in check_erase2_sets()
34193 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34194 check_erase2_testset(mt, set35, ARRAY_SIZE(set35)); in check_erase2_sets()
34197 mt_validate(mt); in check_erase2_sets()
34198 mtree_destroy(mt); in check_erase2_sets()
34203 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34204 check_erase2_testset(mt, set36, ARRAY_SIZE(set36)); in check_erase2_sets()
34207 mt_validate(mt); in check_erase2_sets()
34208 mtree_destroy(mt); in check_erase2_sets()
34211 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34212 check_erase2_testset(mt, set37, ARRAY_SIZE(set37)); in check_erase2_sets()
34214 MT_BUG_ON(mt, 0 != mtree_load(mt, 94637033459712)); in check_erase2_sets()
34215 mt_validate(mt); in check_erase2_sets()
34216 mtree_destroy(mt); in check_erase2_sets()
34219 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34220 check_erase2_testset(mt, set38, ARRAY_SIZE(set38)); in check_erase2_sets()
34222 MT_BUG_ON(mt, 0 != mtree_load(mt, 94637033459712)); in check_erase2_sets()
34223 mt_validate(mt); in check_erase2_sets()
34224 mtree_destroy(mt); in check_erase2_sets()
34227 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34228 check_erase2_testset(mt, set39, ARRAY_SIZE(set39)); in check_erase2_sets()
34230 mt_validate(mt); in check_erase2_sets()
34231 mtree_destroy(mt); in check_erase2_sets()
34234 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34235 check_erase2_testset(mt, set40, ARRAY_SIZE(set40)); in check_erase2_sets()
34237 mt_validate(mt); in check_erase2_sets()
34238 mtree_destroy(mt); in check_erase2_sets()
34241 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34242 check_erase2_testset(mt, set41, ARRAY_SIZE(set41)); in check_erase2_sets()
34244 mt_validate(mt); in check_erase2_sets()
34245 mtree_destroy(mt); in check_erase2_sets()
34250 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34251 check_erase2_testset(mt, set42, ARRAY_SIZE(set42)); in check_erase2_sets()
34254 MT_BUG_ON(mt, mas.last != 4041211903); in check_erase2_sets()
34256 mt_validate(mt); in check_erase2_sets()
34257 mtree_destroy(mt); in check_erase2_sets()
34262 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_erase2_sets()
34263 check_erase2_testset(mt, set43, ARRAY_SIZE(set43)); in check_erase2_sets()
34266 mt_validate(mt); in check_erase2_sets()
34267 mtree_destroy(mt); in check_erase2_sets()
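Every set above is driven through the same skeleton; only the flags, the replayed set and the set-specific post-conditions (mt_find(), mtree_load(), mas-based gap checks) change. A reduced sketch, where setN stands for any of the recorded sets:

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);          /* fresh tree with gap tracking        */
	check_erase2_testset(mt, setN, ARRAY_SIZE(setN)); /* replay the recorded store/erase ops */
	/* ... set-specific checks on the resulting tree ... */
	mt_validate(mt);                                  /* structural self-check               */
	mtree_destroy(mt);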
34306 MA_STATE(mas, test->mt, 0, 0); in rcu_reader_fwd()
34416 MA_STATE(mas, test->mt, 0, 0); in rcu_reader_rev()
34478 mt_dump(test->mt); in rcu_reader_rev()
34541 static void rcu_stress_rev(struct maple_tree *mt, struct rcu_test_struct2 *test, in rcu_stress_rev() argument
34561 mtree_store_range(mt, start, end, in rcu_stress_rev()
34571 mtree_store_range(mt, start, end, in rcu_stress_rev()
34582 mtree_store_range(mt, start, end, NULL, GFP_KERNEL); in rcu_stress_rev()
34589 mtree_store_range(mt, start, end, in rcu_stress_rev()
34600 static void rcu_stress_fwd(struct maple_tree *mt, struct rcu_test_struct2 *test, in rcu_stress_fwd() argument
34618 mtree_store_range(mt, start, end, in rcu_stress_fwd()
34628 mtree_store_range(mt, start, end, in rcu_stress_fwd()
34639 mtree_store_range(mt, start, end, NULL, GFP_KERNEL); in rcu_stress_fwd()
34646 mtree_store_range(mt, start, end, in rcu_stress_fwd()
34668 static void rcu_stress(struct maple_tree *mt, bool forward) in rcu_stress() argument
34678 test.mt = mt; in rcu_stress()
34690 mtree_store_range(mt, seed, r, in rcu_stress()
34722 mtree_store_range(mt, test.index[add], test.last[add], in rcu_stress()
34726 mt_set_in_rcu(mt); in rcu_stress()
34732 rcu_stress_fwd(mt, &test, count, test_reader); in rcu_stress()
34734 rcu_stress_rev(mt, &test, count, test_reader); in rcu_stress()
34740 mt_validate(mt); in rcu_stress()
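The writer half of the stress run above is plain mtree_store_range() traffic once the tree has been put into RCU mode; a minimal sketch (the bucket layout and loop bound are illustrative, not the test's):

	unsigned long i;

	mt_set_in_rcu(mt);                                  /* readers may now walk locklessly */
	for (i = 0; i < 1000; i++)
		mtree_store_range(mt, i * 10, i * 10 + 5,   /* overwrite one bucket per pass   */
				  xa_mk_value(i), GFP_KERNEL);
	mt_validate(mt);                                    /* tree must stay consistent       */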
34745 struct maple_tree *mt; /* the maple tree */ member
34824 entry = mtree_load(test->mt, test->index); in rcu_val()
34825 MT_BUG_ON(test->mt, eval_rcu_entry(test, entry, &update_2, in rcu_val()
34845 MA_STATE(mas, test->mt, test->range_start, test->range_start); in rcu_loop()
34866 MT_BUG_ON(test->mt, entry != expected); in rcu_loop()
34874 MT_BUG_ON(test->mt, eval_rcu_entry(test, entry, in rcu_loop()
34887 void run_check_rcu(struct maple_tree *mt, struct rcu_test_struct *vals) in run_check_rcu() argument
34894 mt_set_in_rcu(mt); in run_check_rcu()
34895 MT_BUG_ON(mt, !mt_in_rcu(mt)); in run_check_rcu()
34910 mtree_store_range(mt, vals->index, vals->last, vals->entry2, in run_check_rcu()
34916 MT_BUG_ON(mt, !vals->seen_entry2); in run_check_rcu()
34920 void run_check_rcu_slowread(struct maple_tree *mt, struct rcu_test_struct *vals) in run_check_rcu_slowread() argument
34928 mt_set_in_rcu(mt); in run_check_rcu_slowread()
34929 MT_BUG_ON(mt, !mt_in_rcu(mt)); in run_check_rcu_slowread()
34946 mtree_store(mt, index, in run_check_rcu_slowread()
34957 MT_BUG_ON(mt, !vals->seen_entry2); in run_check_rcu_slowread()
34958 MT_BUG_ON(mt, !vals->seen_entry3); in run_check_rcu_slowread()
34959 MT_BUG_ON(mt, !vals->seen_both); in run_check_rcu_slowread()
34961 static noinline void check_rcu_simulated(struct maple_tree *mt) in check_rcu_simulated() argument
34967 MA_STATE(mas_writer, mt, 0, 0); in check_rcu_simulated()
34968 MA_STATE(mas_reader, mt, target, target); in check_rcu_simulated()
34972 mt_set_in_rcu(mt); in check_rcu_simulated()
34984 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
34988 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35001 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35005 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35018 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35022 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35035 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35039 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35051 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35055 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35067 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35071 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val)); in check_rcu_simulated()
35092 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35097 MT_BUG_ON(mt, mas_next(&mas_reader, ULONG_MAX) != xa_mk_value(val)); in check_rcu_simulated()
35111 MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target/10)); in check_rcu_simulated()
35116 MT_BUG_ON(mt, mas_prev(&mas_reader, 0) != xa_mk_value(val)); in check_rcu_simulated()
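Each scenario above repeats the same simulated read side: walk to target under rcu_read_lock(), let the writer replace the entry, then re-walk and require a coherent value. A minimal sketch of the reader half; target is the index the reader state is built with above, val stands for the index encoded in the writer's replacement value, and the explicit mas_reset() is this sketch's choice rather than necessarily the test's:

	MA_STATE(mas_reader, mt, target, target);

	rcu_read_lock();
	MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(target / 10)); /* old entry visible     */
	/* ... mas_writer stores xa_mk_value(val) over this range ... */
	mas_reset(&mas_reader);                                           /* re-walk from the root */
	MT_BUG_ON(mt, mas_walk(&mas_reader) != xa_mk_value(val));         /* new entry visible     */
	rcu_read_unlock();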
35122 static noinline void check_rcu_threaded(struct maple_tree *mt) in check_rcu_threaded() argument
35132 mtree_store_range(mt, i*10, i*10 + 5, in check_rcu_threaded()
35136 vals.mt = mt; in check_rcu_threaded()
35147 run_check_rcu(mt, &vals); in check_rcu_threaded()
35148 mtree_destroy(mt); in check_rcu_threaded()
35150 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35152 mtree_store_range(mt, i*10, i*10 + 5, in check_rcu_threaded()
35159 vals.mt = mt; in check_rcu_threaded()
35168 run_check_rcu(mt, &vals); in check_rcu_threaded()
35169 mtree_destroy(mt); in check_rcu_threaded()
35173 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35174 rcu_stress(mt, true); in check_rcu_threaded()
35175 mtree_destroy(mt); in check_rcu_threaded()
35178 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35179 rcu_stress(mt, false); in check_rcu_threaded()
35180 mtree_destroy(mt); in check_rcu_threaded()
35183 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_rcu_threaded()
35185 mtree_store_range(mt, i*10, i*10 + 5, in check_rcu_threaded()
35192 vals.mt = mt; in check_rcu_threaded()
35213 run_check_rcu_slowread(mt, &vals); in check_rcu_threaded()
35263 static void check_dfs_preorder(struct maple_tree *mt) in check_dfs_preorder() argument
35267 MA_STATE(mas, mt, 0, 0); in check_dfs_preorder()
35274 check_seq(mt, max, false); in check_dfs_preorder()
35279 MT_BUG_ON(mt, count != e); in check_dfs_preorder()
35280 mtree_destroy(mt); in check_dfs_preorder()
35282 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_dfs_preorder()
35288 check_seq(mt, max, false); in check_dfs_preorder()
35294 MT_BUG_ON(mt, count != e); in check_dfs_preorder()
35295 mtree_destroy(mt); in check_dfs_preorder()
35297 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_dfs_preorder()
35300 check_rev_seq(mt, max, false); in check_dfs_preorder()
35306 MT_BUG_ON(mt, count != e); in check_dfs_preorder()
35307 mtree_destroy(mt); in check_dfs_preorder()
35309 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_dfs_preorder()
35317 MT_BUG_ON(mt, mas_is_err(&mas)); in check_dfs_preorder()
35331 static noinline void check_prealloc(struct maple_tree *mt) in check_prealloc() argument
35338 MA_STATE(mas, mt, 10, 20); in check_prealloc()
35342 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_prealloc()
35344 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35347 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35348 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35351 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35353 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35356 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35357 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35358 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35361 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35364 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35367 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35368 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35370 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1); in check_prealloc()
35372 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35375 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35377 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35380 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35381 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35383 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1); in check_prealloc()
35384 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35387 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35390 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35393 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35394 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35396 MT_BUG_ON(mt, mas_allocated(&mas) != allocated - 1); in check_prealloc()
35398 MT_BUG_ON(mt, mas_allocated(&mas) != allocated); in check_prealloc()
35399 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35402 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35404 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35407 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35408 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35410 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35412 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35415 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35416 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35418 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35419 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35422 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35423 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35426 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35429 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35430 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35432 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35434 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) == 0); in check_prealloc()
35437 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
35441 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0); in check_prealloc()
35444 MT_BUG_ON(mt, allocated == 0); in check_prealloc()
35445 MT_BUG_ON(mt, allocated != 1 + height * 3); in check_prealloc()
35447 MT_BUG_ON(mt, mas_allocated(&mas) != 0); in check_prealloc()
35449 MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) == 0); in check_prealloc()
35452 MT_BUG_ON(mt, allocated != 0); in check_prealloc()
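The invariant re-checked all through check_prealloc(): a successful mas_preallocate() caches 1 + height * 3 nodes, and either mas_destroy() or the following store must drain that cache to zero. A compressed sketch using the same calls (mt_height() stands in for the local height the test computes, and ptr is any non-NULL payload):

	MA_STATE(mas, mt, 10, 20);
	void *ptr = xa_mk_value(42);
	unsigned long allocated;

	mtree_lock(mt);
	MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);  /* reserve the worst case */
	allocated = mas_allocated(&mas);
	MT_BUG_ON(mt, allocated != 1 + mt_height(mt) * 3);           /* the sizing rule above  */
	mas_store_prealloc(&mas, ptr);                               /* consumes the reserve   */
	MT_BUG_ON(mt, mas_allocated(&mas) != 0);                     /* nothing left behind    */
	mtree_unlock(mt);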
35457 static noinline void check_spanning_write(struct maple_tree *mt) in check_spanning_write() argument
35460 MA_STATE(mas, mt, 1200, 2380); in check_spanning_write()
35463 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35465 mtree_lock(mt); in check_spanning_write()
35468 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35469 mtree_unlock(mt); in check_spanning_write()
35470 mtree_destroy(mt); in check_spanning_write()
35473 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35475 mtree_lock(mt); in check_spanning_write()
35479 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35480 mtree_unlock(mt); in check_spanning_write()
35481 mt_validate(mt); in check_spanning_write()
35482 mtree_destroy(mt); in check_spanning_write()
35485 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35487 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35490 mtree_lock(mt); in check_spanning_write()
35493 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35494 mtree_unlock(mt); in check_spanning_write()
35495 mtree_destroy(mt); in check_spanning_write()
35498 mt_init_flags(mt, 0); in check_spanning_write()
35500 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35503 mtree_lock(mt); in check_spanning_write()
35506 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35507 mtree_unlock(mt); in check_spanning_write()
35508 mtree_destroy(mt); in check_spanning_write()
35511 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35513 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35516 mtree_lock(mt); in check_spanning_write()
35519 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35520 mtree_unlock(mt); in check_spanning_write()
35521 mtree_destroy(mt); in check_spanning_write()
35524 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35526 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35529 mtree_lock(mt); in check_spanning_write()
35532 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35533 mtree_unlock(mt); in check_spanning_write()
35534 mtree_destroy(mt); in check_spanning_write()
35537 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35539 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35542 mtree_lock(mt); in check_spanning_write()
35545 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35546 mtree_unlock(mt); in check_spanning_write()
35547 mtree_destroy(mt); in check_spanning_write()
35553 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35555 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35558 mtree_lock(mt); in check_spanning_write()
35561 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35562 mtree_unlock(mt); in check_spanning_write()
35563 mtree_destroy(mt); in check_spanning_write()
35569 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35571 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35573 mtree_lock(mt); in check_spanning_write()
35576 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35577 mtree_unlock(mt); in check_spanning_write()
35578 mtree_destroy(mt); in check_spanning_write()
35581 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35583 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35584 mtree_lock(mt); in check_spanning_write()
35595 MT_BUG_ON(mt, (mas_data_end(&mas)) != mt_slot_count(mas.node) - 1); in check_spanning_write()
35598 mtree_unlock(mt); in check_spanning_write()
35599 mtree_destroy(mt); in check_spanning_write()
35602 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35604 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35606 mtree_lock(mt); in check_spanning_write()
35609 MT_BUG_ON(mt, mas_walk(&mas) != NULL); in check_spanning_write()
35610 mtree_unlock(mt); in check_spanning_write()
35611 mtree_destroy(mt); in check_spanning_write()
35613 mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE); in check_spanning_write()
35615 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_spanning_write()
35617 mtree_lock(mt); in check_spanning_write()
35620 mtree_unlock(mt); in check_spanning_write()
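Every variant above boils down to the same operation: populate enough entries to build several levels, then issue one store whose range crosses node boundaries and check that a probe inside the old range now reads NULL. A minimal sketch; the 1200-2380 range and the 1205 probe come from the listing, the population bound is illustrative, and mas_store_gfp() is assumed to be the store primitive being exercised:

	MA_STATE(mas, mt, 1200, 2380);
	unsigned long i;

	for (i = 0; i <= 500; i++)
		mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); /* populate many leaves */

	mtree_lock(mt);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);      /* one store spanning node boundaries */
	mas_set(&mas, 1205);
	MT_BUG_ON(mt, mas_walk(&mas) != NULL);      /* old entries inside the range gone  */
	mtree_unlock(mt);
	mtree_destroy(mt);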
35625 static noinline void check_null_expand(struct maple_tree *mt) in check_null_expand() argument
35629 MA_STATE(mas, mt, 959, 959); in check_null_expand()
35632 mtree_test_store_range(mt, i * 10, i * 10 + 5, &i); in check_null_expand()
35639 MT_BUG_ON(mt, mtree_load(mt, 963) != NULL); in check_null_expand()
35640 MT_BUG_ON(mt, data_end != mas_data_end(&mas)); in check_null_expand()
35648 MT_BUG_ON(mt, mtree_load(mt, 884) != NULL); in check_null_expand()
35649 MT_BUG_ON(mt, mtree_load(mt, 889) != NULL); in check_null_expand()
35651 MT_BUG_ON(mt, data_end != mas_data_end(&mas)); in check_null_expand()
35660 MT_BUG_ON(mt, mtree_load(mt, 899) != NULL); in check_null_expand()
35661 MT_BUG_ON(mt, mtree_load(mt, 900) != NULL); in check_null_expand()
35662 MT_BUG_ON(mt, mtree_load(mt, 905) != NULL); in check_null_expand()
35663 MT_BUG_ON(mt, mtree_load(mt, 906) != NULL); in check_null_expand()
35665 MT_BUG_ON(mt, data_end - 2 != mas_data_end(&mas)); in check_null_expand()
35674 MT_BUG_ON(mt, mtree_load(mt, 809) != NULL); in check_null_expand()
35675 MT_BUG_ON(mt, mtree_load(mt, 810) != NULL); in check_null_expand()
35676 MT_BUG_ON(mt, mtree_load(mt, 825) != NULL); in check_null_expand()
35677 MT_BUG_ON(mt, mtree_load(mt, 826) != NULL); in check_null_expand()
35679 MT_BUG_ON(mt, data_end - 4 != mas_data_end(&mas)); in check_null_expand()
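The pattern behind the data_end comparisons above: store NULL next to an existing NULL range and require that the slots merge instead of growing, i.e. mas_data_end() must not increase. A reconstructed sketch; the exact range endpoints are illustrative, and mas_set_range()/mas_data_end() are helpers this test build can see:

	MA_STATE(mas, mt, 959, 959);
	unsigned char data_end;

	mtree_lock(mt);
	mas_walk(&mas);                                 /* position on the leaf holding 959 */
	data_end = mas_data_end(&mas);                  /* slots in use before the store    */
	mas_set_range(&mas, 963, 967);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);          /* write a NULL range next to NULLs */
	MT_BUG_ON(mt, mtree_load(mt, 963) != NULL);
	MT_BUG_ON(mt, data_end != mas_data_end(&mas));  /* merged, not appended             */
	mtree_unlock(mt);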
35686 static noinline void check_nomem(struct maple_tree *mt) in check_nomem() argument
35688 MA_STATE(ms, mt, 1, 1); in check_nomem()
35690 MT_BUG_ON(mt, !mtree_empty(mt)); in check_nomem()
35695 MT_BUG_ON(mt, mtree_insert(mt, 1, &ms, GFP_ATOMIC) != -ENOMEM); in check_nomem()
35697 MT_BUG_ON(mt, mtree_insert(mt, 0, &ms, GFP_ATOMIC) != 0); in check_nomem()
35707 mtree_lock(mt); in check_nomem()
35709 MT_BUG_ON(mt, ms.node != MA_ERROR(-ENOMEM)); in check_nomem()
35711 MT_BUG_ON(mt, ms.node != MAS_START); in check_nomem()
35712 mtree_unlock(mt); in check_nomem()
35713 MT_BUG_ON(mt, mtree_insert(mt, 2, mt, GFP_KERNEL) != 0); in check_nomem()
35714 mtree_lock(mt); in check_nomem()
35717 mtree_unlock(mt); in check_nomem()
35718 mtree_destroy(mt); in check_nomem()
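The locked section above is the interesting part of check_nomem(): the MA_ERROR(-ENOMEM) assertion implies the harness denies the store's implicit non-sleeping allocation, and mas_nomem() is then used to allocate with GFP_KERNEL and reset the state. A minimal sketch of that recovery path, under that assumption:

	MA_STATE(ms, mt, 1, 1);

	mtree_lock(mt);
	mas_store(&ms, &ms);                          /* implicit alloc denied, store fails */
	MT_BUG_ON(mt, ms.node != MA_ERROR(-ENOMEM));  /* the failure is left in the state   */
	mas_nomem(&ms, GFP_KERNEL);                   /* sleepable retry allocates a node   */
	MT_BUG_ON(mt, ms.node != MAS_START);          /* state reset, ready to store again  */
	mtree_unlock(mt);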
35721 static noinline void check_locky(struct maple_tree *mt) in check_locky() argument
35723 MA_STATE(ms, mt, 2, 2); in check_locky()
35724 MA_STATE(reader, mt, 2, 2); in check_locky()
35727 mt_set_in_rcu(mt); in check_locky()
35733 mt_clear_in_rcu(mt); in check_locky()