1 // SPDX-License-Identifier: GPL-2.0-or-later
40 * A simple test that tries to allocate a memory region within min_addr and
44 * | + +-----------+ |
46 * +----+-------+-----------+------+
55 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_simple_check()
71 rgn_end = rgn->base + rgn->size; in alloc_try_nid_top_down_simple_check()
76 ASSERT_EQ(rgn->size, size); in alloc_try_nid_top_down_simple_check()
77 ASSERT_EQ(rgn->base, max_addr - size); in alloc_try_nid_top_down_simple_check()
80 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_top_down_simple_check()
81 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_top_down_simple_check()
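/*
 * For reference, a minimal sketch of the call exercised by the checks
 * above (the setup lines are elided from this listing; the values shown
 * here are illustrative assumptions, not the originals):
 *
 *	setup_memblock();
 *	min_addr = memblock_start_of_DRAM() + SMP_CACHE_BYTES * 2;
 *	max_addr = min_addr + SZ_512;
 *	allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
 *					       min_addr, max_addr,
 *					       NUMA_NO_NODE);
 *	ASSERT_NE(allocated_ptr, NULL);
 */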
89 * A simple test that tries to allocate a memory region within min_addr and
93 * | + +---------+ + |
95 * +------+-------+---------+--+----+
107 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_end_misaligned_check()
124 rgn_end = rgn->base + rgn->size; in alloc_try_nid_top_down_end_misaligned_check()
129 ASSERT_EQ(rgn->size, size); in alloc_try_nid_top_down_end_misaligned_check()
130 ASSERT_EQ(rgn->base, max_addr - size - misalign); in alloc_try_nid_top_down_end_misaligned_check()
133 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_top_down_end_misaligned_check()
134 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_top_down_end_misaligned_check()
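/*
 * The misalignment arithmetic above can be made concrete (illustrative
 * sketch; max_addr is assumed to sit 'misalign' bytes above a
 * cache-line-aligned boundary):
 *
 *	misalign = SZ_2;
 *	max_addr = min_addr + SZ_512 + misalign;
 *
 * A top-down allocation aligned to SMP_CACHE_BYTES cannot end at the
 * misaligned max_addr, so the region slides down by 'misalign' bytes,
 * which is exactly what the base assertion checks:
 *
 *	rgn->base == max_addr - size - misalign
 */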
142 * A simple test that tries to allocate a memory region, which spans over the
146 * | +---------------+ |
148 * +------+---------------+-------+
158 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_exact_address_generic_check()
174 rgn_end = rgn->base + rgn->size; in alloc_try_nid_exact_address_generic_check()
179 ASSERT_EQ(rgn->size, size); in alloc_try_nid_exact_address_generic_check()
180 ASSERT_EQ(rgn->base, min_addr); in alloc_try_nid_exact_address_generic_check()
183 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_exact_address_generic_check()
184 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_exact_address_generic_check()
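/*
 * ASSERT_EQ()/ASSERT_LE()/ASSERT_NE() come from the memblock test
 * harness; a simplified sketch of their assumed shape:
 *
 *	#define ASSERT_EQ(_seen, _expected) \
 *		do { \
 *			if ((_seen) != (_expected)) \
 *				test_fail(); \
 *			assert((_seen) == (_expected)); \
 *		} while (0)
 *
 * test_fail() stands in for whatever failure reporting the harness does
 * before the assert fires.
 */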
192 * A test that tries to allocate a memory region, which can't fit into
196 * | +----------+-----+ |
198 * +--------+----------+-----+----+
205 * Expect to drop the lower limit and allocate a memory region which
210 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_narrow_range_check()
229 ASSERT_EQ(rgn->size, size); in alloc_try_nid_top_down_narrow_range_check()
230 ASSERT_EQ(rgn->base, max_addr - size); in alloc_try_nid_top_down_narrow_range_check()
232 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_top_down_narrow_range_check()
233 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_top_down_narrow_range_check()
241 * A test that tries to allocate a memory region, which can't fit into
243 * of the available memory:
245 * +-------------+
247 * +-------------+
251 * +-------+--------------+
285 * A test that tries to allocate a memory region within min_addr and max_addr range,
289 * | +--------+---------------|
291 * +-------+--------+---------------+
300 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_min_reserved_generic_check()
313 min_addr = max_addr - r2_size; in alloc_try_nid_min_reserved_generic_check()
314 reserved_base = min_addr - r1_size; in alloc_try_nid_min_reserved_generic_check()
325 ASSERT_EQ(rgn->size, total_size); in alloc_try_nid_min_reserved_generic_check()
326 ASSERT_EQ(rgn->base, reserved_base); in alloc_try_nid_min_reserved_generic_check()
328 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_min_reserved_generic_check()
329 ASSERT_EQ(memblock.reserved.total_size, total_size); in alloc_try_nid_min_reserved_generic_check()
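/*
 * Sketch of the setup this test relies on (assumed; the reserve and
 * allocation calls are elided from this listing). Reserving r1 directly
 * below min_addr means the new r2_size allocation that starts at
 * min_addr is adjacent to it, so memblock merges the two into a single
 * reserved region of total_size = r1_size + r2_size:
 *
 *	memblock_reserve(reserved_base, r1_size);
 *	allocated_ptr = memblock_alloc_try_nid(r2_size, SMP_CACHE_BYTES,
 *					       min_addr, max_addr,
 *					       NUMA_NO_NODE);
 */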
337 * A test that tries to allocate a memory region within min_addr and max_addr,
341 * | +-------------+--------|
343 * +----------+-------------+--------+
352 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_max_reserved_generic_check()
363 max_addr = memblock_end_of_DRAM() - r1_size; in alloc_try_nid_max_reserved_generic_check()
364 min_addr = max_addr - r2_size; in alloc_try_nid_max_reserved_generic_check()
375 ASSERT_EQ(rgn->size, total_size); in alloc_try_nid_max_reserved_generic_check()
376 ASSERT_EQ(rgn->base, min_addr); in alloc_try_nid_max_reserved_generic_check()
378 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_max_reserved_generic_check()
379 ASSERT_EQ(memblock.reserved.total_size, total_size); in alloc_try_nid_max_reserved_generic_check()
387 * A test that tries to allocate memory within min_addr and max_addr range, when
388 * there are two reserved regions at the borders, with a gap big enough to fit
392 * | +--------+ +-------+------+ |
394 * +----+--------+---+-------+------+--+
405 struct memblock_region *rgn1 = &memblock.reserved.regions[1]; in alloc_try_nid_top_down_reserved_with_space_check()
406 struct memblock_region *rgn2 = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_reserved_with_space_check()
418 r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2; in alloc_try_nid_top_down_reserved_with_space_check()
422 r2.base = r1.base - (r3_size + gap_size + r2.size); in alloc_try_nid_top_down_reserved_with_space_check()
438 ASSERT_EQ(rgn1->size, r1.size + r3_size); in alloc_try_nid_top_down_reserved_with_space_check()
439 ASSERT_EQ(rgn1->base, max_addr - r3_size); in alloc_try_nid_top_down_reserved_with_space_check()
441 ASSERT_EQ(rgn2->size, r2.size); in alloc_try_nid_top_down_reserved_with_space_check()
442 ASSERT_EQ(rgn2->base, r2.base); in alloc_try_nid_top_down_reserved_with_space_check()
444 ASSERT_EQ(memblock.reserved.cnt, 2); in alloc_try_nid_top_down_reserved_with_space_check()
445 ASSERT_EQ(memblock.reserved.total_size, total_size); in alloc_try_nid_top_down_reserved_with_space_check()
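/*
 * The r1/r2 values above come from a border-reservation pattern; a
 * hedged sketch of its assumed shape ('struct region' pairs a base with
 * a size in the test harness, and the sizes here are illustrative):
 *
 *	struct region r1, r2;
 *
 *	r1.size = SMP_CACHE_BYTES;
 *	r2.size = SZ_128;
 *	memblock_reserve(r1.base, r1.size);
 *	memblock_reserve(r2.base, r2.size);
 *
 * The new r3_size allocation then drops into the gap just below r1 and
 * merges with it, which is why rgn1 covers r1.size + r3_size.
 */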
453 * A test that tries to allocate memory within min_addr and max_addr range, when
454 * there are two reserved regions at the borders, with a gap of a size equal to
458 * | +--------+--------+--------+ |
460 * +-----+--------+--------+--------+-----+
470 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_reserved_full_merge_generic_check()
481 r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2; in alloc_try_nid_reserved_full_merge_generic_check()
485 r2.base = r1.base - (r3_size + r2.size); in alloc_try_nid_reserved_full_merge_generic_check()
501 ASSERT_EQ(rgn->size, total_size); in alloc_try_nid_reserved_full_merge_generic_check()
502 ASSERT_EQ(rgn->base, r2.base); in alloc_try_nid_reserved_full_merge_generic_check()
504 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_reserved_full_merge_generic_check()
505 ASSERT_EQ(memblock.reserved.total_size, total_size); in alloc_try_nid_reserved_full_merge_generic_check()
513 * A test that tries to allocate memory within min_addr and max_addr range, when
514 * there are two reserved regions at the borders, with a gap that can't fit
518 * | +----------+------+ +------+ |
520 * +--+----------+------+----+------+---+
532 struct memblock_region *rgn1 = &memblock.reserved.regions[1]; in alloc_try_nid_top_down_reserved_no_space_check()
533 struct memblock_region *rgn2 = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_reserved_no_space_check()
545 r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2; in alloc_try_nid_top_down_reserved_no_space_check()
549 r2.base = r1.base - (r2.size + gap_size); in alloc_try_nid_top_down_reserved_no_space_check()
565 ASSERT_EQ(rgn1->size, r1.size); in alloc_try_nid_top_down_reserved_no_space_check()
566 ASSERT_EQ(rgn1->base, r1.base); in alloc_try_nid_top_down_reserved_no_space_check()
568 ASSERT_EQ(rgn2->size, r2.size + r3_size); in alloc_try_nid_top_down_reserved_no_space_check()
569 ASSERT_EQ(rgn2->base, r2.base - r3_size); in alloc_try_nid_top_down_reserved_no_space_check()
571 ASSERT_EQ(memblock.reserved.cnt, 2); in alloc_try_nid_top_down_reserved_no_space_check()
572 ASSERT_EQ(memblock.reserved.total_size, total_size); in alloc_try_nid_top_down_reserved_no_space_check()
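/*
 * Worked reasoning for the "no space" case (made explicit from the
 * assertions above): the gap between r2 and r1 is smaller than r3_size,
 * so the new region cannot sit between them; the top-down search
 * continues below r2 instead, ending flush against r2.base and merging
 * with r2:
 *
 *	rgn2->base == r2.base - r3_size
 *	rgn2->size == r2.size + r3_size
 */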
580 * A test that tries to allocate memory within min_addr and max_addr range, but
581 * it's too narrow and everything else is reserved:
583 * +-----------+
585 * +-----------+
587 * |--------------+ +----------|
589 * +--------------+------+----------+
611 r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES; in alloc_try_nid_reserved_all_generic_check()
614 r2.size = MEM_SIZE - (r1.size + gap_size); in alloc_try_nid_reserved_all_generic_check()
635 * A test that tries to allocate a memory region, where max_addr is
636 * bigger than the end address of the available memory. Expect to allocate
637 * a region that ends before the end of the memory.
641 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_cap_max_check()
650 min_addr = memblock_end_of_DRAM() - SZ_1K; in alloc_try_nid_top_down_cap_max_check()
660 ASSERT_EQ(rgn->size, size); in alloc_try_nid_top_down_cap_max_check()
661 ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - size); in alloc_try_nid_top_down_cap_max_check()
663 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_top_down_cap_max_check()
664 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_top_down_cap_max_check()
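/*
 * A sketch of the capped range (illustrative values; the original
 * max_addr setup is elided): max_addr is requested past the end of DRAM
 * and memblock clamps it, so the region still ends at the last valid
 * address:
 *
 *	min_addr = memblock_end_of_DRAM() - SZ_1K;
 *	max_addr = memblock_end_of_DRAM() + SZ_256;	-- beyond DRAM
 *
 *	rgn->base == memblock_end_of_DRAM() - size
 */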
672 * A test that tries to allocate a memory region, where min_addr is
673 * smaller than the start address of the available memory. Expect to allocate
674 * a region that ends before the end of the memory.
678 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_cap_min_check()
687 min_addr = memblock_start_of_DRAM() - SZ_256; in alloc_try_nid_top_down_cap_min_check()
697 ASSERT_EQ(rgn->size, size); in alloc_try_nid_top_down_cap_min_check()
698 ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - size); in alloc_try_nid_top_down_cap_min_check()
700 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_top_down_cap_min_check()
701 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_top_down_cap_min_check()
709 * A simple test that tries to allocate a memory region within min_addr and
713 * | +-----------+ | |
715 * +----+-----------+-----------+------+
724 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_simple_check()
740 rgn_end = rgn->base + rgn->size; in alloc_try_nid_bottom_up_simple_check()
745 ASSERT_EQ(rgn->size, size); in alloc_try_nid_bottom_up_simple_check()
746 ASSERT_EQ(rgn->base, min_addr); in alloc_try_nid_bottom_up_simple_check()
749 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_simple_check()
750 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_bottom_up_simple_check()
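/*
 * The bottom_up variants assume the allocation direction has been
 * flipped before the call; a minimal sketch (memblock_set_bottom_up()
 * is the in-kernel toggle; how the test runner drives it is assumed
 * here):
 *
 *	memblock_set_bottom_up(true);
 *	allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
 *					       min_addr, max_addr,
 *					       NUMA_NO_NODE);
 *
 * With bottom-up allocation the region grows up from the lower limit,
 * hence rgn->base == min_addr above.
 */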
758 * A simple test that tries to allocate a memory region within min_addr and
762 * | + +-----------+ + |
764 * +-----+---+-----------+-----+-----+
765 * ^ ^----. ^
776 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_start_misaligned_check()
793 rgn_end = rgn->base + rgn->size; in alloc_try_nid_bottom_up_start_misaligned_check()
798 ASSERT_EQ(rgn->size, size); in alloc_try_nid_bottom_up_start_misaligned_check()
799 ASSERT_EQ(rgn->base, min_addr + (SMP_CACHE_BYTES - misalign)); in alloc_try_nid_bottom_up_start_misaligned_check()
802 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_start_misaligned_check()
803 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_bottom_up_start_misaligned_check()
811 * A test that tries to allocate a memory region, which can't fit into min_addr
815 * |---------+ + + |
817 * +---------+---------+----+------+
824 * Expect to drop the lower limit and allocate a memory region which
825 * starts at the beginning of the available memory.
829 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_narrow_range_check()
848 ASSERT_EQ(rgn->size, size); in alloc_try_nid_bottom_up_narrow_range_check()
849 ASSERT_EQ(rgn->base, memblock_start_of_DRAM()); in alloc_try_nid_bottom_up_narrow_range_check()
851 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_narrow_range_check()
852 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_bottom_up_narrow_range_check()
860 * A test that tries to allocate memory within min_addr and max_addr range, when
861 * there are two reserved regions at the borders, with a gap big enough to fit
865 * | +--------+-------+ +------+ |
867 * +----+--------+-------+---+------+--+
878 struct memblock_region *rgn1 = &memblock.reserved.regions[1]; in alloc_try_nid_bottom_up_reserved_with_space_check()
879 struct memblock_region *rgn2 = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_reserved_with_space_check()
891 r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2; in alloc_try_nid_bottom_up_reserved_with_space_check()
895 r2.base = r1.base - (r3_size + gap_size + r2.size); in alloc_try_nid_bottom_up_reserved_with_space_check()
911 ASSERT_EQ(rgn1->size, r1.size); in alloc_try_nid_bottom_up_reserved_with_space_check()
912 ASSERT_EQ(rgn1->base, max_addr); in alloc_try_nid_bottom_up_reserved_with_space_check()
914 ASSERT_EQ(rgn2->size, r2.size + r3_size); in alloc_try_nid_bottom_up_reserved_with_space_check()
915 ASSERT_EQ(rgn2->base, r2.base); in alloc_try_nid_bottom_up_reserved_with_space_check()
917 ASSERT_EQ(memblock.reserved.cnt, 2); in alloc_try_nid_bottom_up_reserved_with_space_check()
918 ASSERT_EQ(memblock.reserved.total_size, total_size); in alloc_try_nid_bottom_up_reserved_with_space_check()
926 * A test that tries to allocate memory within min_addr and max_addr range, when
927 * there are two reserved regions at the borders, with a gap of a size equal to
931 * |----------+ +------+ +----+ |
933 * +----------+----+------+---+----+--+
940 * Expect to drop the lower limit and allocate memory at the beginning of the
941 * available memory. The region counter and total size fields get updated.
947 struct memblock_region *rgn1 = &memblock.reserved.regions[2]; in alloc_try_nid_bottom_up_reserved_no_space_check()
948 struct memblock_region *rgn2 = &memblock.reserved.regions[1]; in alloc_try_nid_bottom_up_reserved_no_space_check()
949 struct memblock_region *rgn3 = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_reserved_no_space_check()
961 r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2; in alloc_try_nid_bottom_up_reserved_no_space_check()
965 r2.base = r1.base - (r2.size + gap_size); in alloc_try_nid_bottom_up_reserved_no_space_check()
981 ASSERT_EQ(rgn3->size, r3_size); in alloc_try_nid_bottom_up_reserved_no_space_check()
982 ASSERT_EQ(rgn3->base, memblock_start_of_DRAM()); in alloc_try_nid_bottom_up_reserved_no_space_check()
984 ASSERT_EQ(rgn2->size, r2.size); in alloc_try_nid_bottom_up_reserved_no_space_check()
985 ASSERT_EQ(rgn2->base, r2.base); in alloc_try_nid_bottom_up_reserved_no_space_check()
987 ASSERT_EQ(rgn1->size, r1.size); in alloc_try_nid_bottom_up_reserved_no_space_check()
988 ASSERT_EQ(rgn1->base, r1.base); in alloc_try_nid_bottom_up_reserved_no_space_check()
990 ASSERT_EQ(memblock.reserved.cnt, 3); in alloc_try_nid_bottom_up_reserved_no_space_check()
991 ASSERT_EQ(memblock.reserved.total_size, total_size); in alloc_try_nid_bottom_up_reserved_no_space_check()
999 * A test that tries to allocate a memory region, where max_addr is
1000 * bigger than the end address of the available memory. Expect to allocate
1005 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_cap_max_check()
1024 ASSERT_EQ(rgn->size, size); in alloc_try_nid_bottom_up_cap_max_check()
1025 ASSERT_EQ(rgn->base, min_addr); in alloc_try_nid_bottom_up_cap_max_check()
1027 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_cap_max_check()
1028 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_bottom_up_cap_max_check()
1036 * A test that tries to allocate a memory region, where min_addr is
1037 * smaller than the start address of the available memory. Expect to allocate
1038 * a region at the beginning of the available memory.
1042 struct memblock_region *rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_cap_min_check()
1052 max_addr = memblock_end_of_DRAM() - SZ_256; in alloc_try_nid_bottom_up_cap_min_check()
1061 ASSERT_EQ(rgn->size, size); in alloc_try_nid_bottom_up_cap_min_check()
1062 ASSERT_EQ(rgn->base, memblock_start_of_DRAM()); in alloc_try_nid_bottom_up_cap_min_check()
1064 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_cap_min_check()
1065 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_bottom_up_cap_min_check()
1228 * A test that tries to allocate a memory region in a specific NUMA node that
1229 * has enough memory to allocate a region of the requested size.
1235 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_numa_simple_check()
1236 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_top_down_numa_simple_check()
1245 ASSERT_LE(SZ_4, req_node->size); in alloc_try_nid_top_down_numa_simple_check()
1246 size = req_node->size / SZ_4; in alloc_try_nid_top_down_numa_simple_check()
1256 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_top_down_numa_simple_check()
1257 ASSERT_EQ(new_rgn->base, region_end(req_node) - size); in alloc_try_nid_top_down_numa_simple_check()
1258 ASSERT_LE(req_node->base, new_rgn->base); in alloc_try_nid_top_down_numa_simple_check()
1260 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_top_down_numa_simple_check()
1261 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_top_down_numa_simple_check()
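/*
 * Sketch of the NUMA variant of the call and of the region_end() helper
 * used in the assertions (both assumed from the test harness; the node
 * layout itself is built by an elided setup helper):
 *
 *	static inline phys_addr_t region_end(struct memblock_region *rgn)
 *	{
 *		return rgn->base + rgn->size;
 *	}
 *
 *	allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
 *					       min_addr, max_addr,
 *					       nid_req);
 */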
1269 * A test that tries to allocate a memory region in a specific NUMA node that
1270 * does not have enough memory to allocate a region of the requested size:
1272 * | +-----+ +------------------+ |
1274 * +---+-----+----------+------------------+-----+
1276 * | +---------+ |
1278 * +-----------------------------+---------+-----+
1281 * enough memory (in this case, nid = 6) after falling back to NUMA_NO_NODE.
1287 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_numa_small_node_check()
1288 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_top_down_numa_small_node_check()
1289 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_try_nid_top_down_numa_small_node_check()
1298 size = SZ_2 * req_node->size; in alloc_try_nid_top_down_numa_small_node_check()
1308 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_top_down_numa_small_node_check()
1309 ASSERT_EQ(new_rgn->base, region_end(exp_node) - size); in alloc_try_nid_top_down_numa_small_node_check()
1310 ASSERT_LE(exp_node->base, new_rgn->base); in alloc_try_nid_top_down_numa_small_node_check()
1312 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_top_down_numa_small_node_check()
1313 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_top_down_numa_small_node_check()
1321 * A test that tries to allocate a memory region in a specific NUMA node that
1322 * is fully reserved:
1324 * | +---------+ +------------------+ |
1326 * +--------------+---------+------------+------------------+-----+
1328 * | +---------+ +---------+ |
1329 * | | reserved| | new | |
1330 * +--------------+---------+---------------------+---------+-----+
1333 * large enough and has enough unreserved memory (in this case, nid = 6) after
1340 struct memblock_region *new_rgn = &memblock.reserved.regions[1]; in alloc_try_nid_top_down_numa_node_reserved_check()
1341 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_top_down_numa_node_reserved_check()
1342 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_try_nid_top_down_numa_node_reserved_check()
1351 size = req_node->size; in alloc_try_nid_top_down_numa_node_reserved_check()
1355 memblock_reserve(req_node->base, req_node->size); in alloc_try_nid_top_down_numa_node_reserved_check()
1362 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_top_down_numa_node_reserved_check()
1363 ASSERT_EQ(new_rgn->base, region_end(exp_node) - size); in alloc_try_nid_top_down_numa_node_reserved_check()
1364 ASSERT_LE(exp_node->base, new_rgn->base); in alloc_try_nid_top_down_numa_node_reserved_check()
1366 ASSERT_EQ(memblock.reserved.cnt, 2); in alloc_try_nid_top_down_numa_node_reserved_check()
1367 ASSERT_EQ(memblock.reserved.total_size, size + req_node->size); in alloc_try_nid_top_down_numa_node_reserved_check()
1375 * A test that tries to allocate a memory region in a specific NUMA node that
1376 * is partially reserved but has enough memory for the allocated region:
1378 * | +---------------------------------------+ |
1380 * +-----------+---------------------------------------+----------+
1382 * | +------------------+ +-----+ |
1383 * | | reserved | | new | |
1384 * +-----------+------------------+--------------+-----+----------+
1392 struct memblock_region *new_rgn = &memblock.reserved.regions[1]; in alloc_try_nid_top_down_numa_part_reserved_check()
1393 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_top_down_numa_part_reserved_check()
1403 ASSERT_LE(SZ_8, req_node->size); in alloc_try_nid_top_down_numa_part_reserved_check()
1404 r1.base = req_node->base; in alloc_try_nid_top_down_numa_part_reserved_check()
1405 r1.size = req_node->size / SZ_2; in alloc_try_nid_top_down_numa_part_reserved_check()
1417 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_top_down_numa_part_reserved_check()
1418 ASSERT_EQ(new_rgn->base, region_end(req_node) - size); in alloc_try_nid_top_down_numa_part_reserved_check()
1419 ASSERT_LE(req_node->base, new_rgn->base); in alloc_try_nid_top_down_numa_part_reserved_check()
1421 ASSERT_EQ(memblock.reserved.cnt, 2); in alloc_try_nid_top_down_numa_part_reserved_check()
1422 ASSERT_EQ(memblock.reserved.total_size, size + r1.size); in alloc_try_nid_top_down_numa_part_reserved_check()
1430 * A test that tries to allocate a memory region in a specific NUMA node that
1431 * is partially reserved and does not have enough contiguous memory for the
1434 * | +-----------------------+ +----------------------|
1436 * +-----------+-----------------------+---------+----------------------+
1438 * | +----------+ +-----------|
1439 * | | reserved | | new |
1440 * +-----------------+----------+---------------------------+-----------+
1443 * large enough and has enough unreserved memory (in this case,
1444 * nid = NUMA_NODES - 1) after falling back to NUMA_NO_NODE. The region count
1450 int nid_exp = NUMA_NODES - 1; in alloc_try_nid_top_down_numa_part_reserved_fallback_check()
1451 struct memblock_region *new_rgn = &memblock.reserved.regions[1]; in alloc_try_nid_top_down_numa_part_reserved_fallback_check()
1452 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_top_down_numa_part_reserved_fallback_check()
1453 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_try_nid_top_down_numa_part_reserved_fallback_check()
1463 ASSERT_LE(SZ_4, req_node->size); in alloc_try_nid_top_down_numa_part_reserved_fallback_check()
1464 size = req_node->size / SZ_2; in alloc_try_nid_top_down_numa_part_reserved_fallback_check()
1465 r1.base = req_node->base + (size / SZ_2); in alloc_try_nid_top_down_numa_part_reserved_fallback_check()
1478 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_top_down_numa_part_reserved_fallback_check()
1479 ASSERT_EQ(new_rgn->base, region_end(exp_node) - size); in alloc_try_nid_top_down_numa_part_reserved_fallback_check()
1480 ASSERT_LE(exp_node->base, new_rgn->base); in alloc_try_nid_top_down_numa_part_reserved_fallback_check()
1482 ASSERT_EQ(memblock.reserved.cnt, 2); in alloc_try_nid_top_down_numa_part_reserved_fallback_check()
1483 ASSERT_EQ(memblock.reserved.total_size, size + r1.size); in alloc_try_nid_top_down_numa_part_reserved_fallback_check()
1491 * A test that tries to allocate a memory region that spans over the min_addr
1499 * | +-----------------------+-----------+ |
1501 * +-----------+-----------------------+-----------+--------------+
1503 * | +-----------+ |
1505 * +-----------------------+-----------+--------------------------+
1507 * Expect to drop the lower limit and allocate a memory region that ends at
1513 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_numa_split_range_low_check()
1514 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_top_down_numa_split_range_low_check()
1525 min_addr = req_node_end - SZ_256; in alloc_try_nid_top_down_numa_split_range_low_check()
1534 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_top_down_numa_split_range_low_check()
1535 ASSERT_EQ(new_rgn->base, req_node_end - size); in alloc_try_nid_top_down_numa_split_range_low_check()
1536 ASSERT_LE(req_node->base, new_rgn->base); in alloc_try_nid_top_down_numa_split_range_low_check()
1538 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_top_down_numa_split_range_low_check()
1539 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_top_down_numa_split_range_low_check()
1547 * A test that tries to allocate a memory region that spans over the min_addr
1555 * | +--------------------------+---------+ |
1557 * +------+--------------------------+---------+----------------+
1559 * | +---------+ |
1561 * +-----------------------+---------+--------------------------+
1563 * Expect to drop the lower limit and allocate a memory region that
1569 int nid_exp = nid_req - 1; in alloc_try_nid_top_down_numa_split_range_high_check()
1570 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_numa_split_range_high_check()
1571 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_try_nid_top_down_numa_split_range_high_check()
1582 min_addr = exp_node_end - SZ_256; in alloc_try_nid_top_down_numa_split_range_high_check()
1591 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_top_down_numa_split_range_high_check()
1592 ASSERT_EQ(new_rgn->base, exp_node_end - size); in alloc_try_nid_top_down_numa_split_range_high_check()
1593 ASSERT_LE(exp_node->base, new_rgn->base); in alloc_try_nid_top_down_numa_split_range_high_check()
1595 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_top_down_numa_split_range_high_check()
1596 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_top_down_numa_split_range_high_check()
1604 * A test that tries to allocate a memory region that spans over the min_addr
1612 * | +---------------+ +-------------+---------+ |
1614 * +----+---------------+--------+-------------+---------+----------+
1616 * | +---------+ |
1618 * +----------+---------+-------------------------------------------+
1620 * Expect to drop the lower limit and allocate a memory region that ends at
1626 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_numa_no_overlap_split_check()
1627 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_top_down_numa_no_overlap_split_check()
1628 struct memblock_region *node2 = &memblock.memory.regions[6]; in alloc_try_nid_top_down_numa_no_overlap_split_check()
1638 min_addr = node2->base - SZ_256; in alloc_try_nid_top_down_numa_no_overlap_split_check()
1647 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_top_down_numa_no_overlap_split_check()
1648 ASSERT_EQ(new_rgn->base, region_end(req_node) - size); in alloc_try_nid_top_down_numa_no_overlap_split_check()
1649 ASSERT_LE(req_node->base, new_rgn->base); in alloc_try_nid_top_down_numa_no_overlap_split_check()
1651 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_top_down_numa_no_overlap_split_check()
1652 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_top_down_numa_no_overlap_split_check()
1660 * A test that tries to allocate memory within min_addr and max_addr range when
1669 * |-----------+ +----------+----...----+----------+ |
1671 * +-----------+-----------+----------+----...----+----------+------+
1673 * | +-----+ |
1675 * +---------------------------------------------------+-----+------+
1677 * Expect to allocate a memory region at the end of the final node in
1683 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_numa_no_overlap_low_check()
1684 struct memblock_region *min_node = &memblock.memory.regions[2]; in alloc_try_nid_top_down_numa_no_overlap_low_check()
1685 struct memblock_region *max_node = &memblock.memory.regions[5]; in alloc_try_nid_top_down_numa_no_overlap_low_check()
1694 min_addr = min_node->base; in alloc_try_nid_top_down_numa_no_overlap_low_check()
1703 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_top_down_numa_no_overlap_low_check()
1704 ASSERT_EQ(new_rgn->base, max_addr - size); in alloc_try_nid_top_down_numa_no_overlap_low_check()
1705 ASSERT_LE(max_node->base, new_rgn->base); in alloc_try_nid_top_down_numa_no_overlap_low_check()
1707 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_top_down_numa_no_overlap_low_check()
1708 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_top_down_numa_no_overlap_low_check()
1716 * A test that tries to allocate memory within min_addr and max_addr range when
1725 * | +----------+----...----+----------+ +-----------+ |
1727 * +-----+----------+----...----+----------+--------+-----------+---+
1729 * | +-----+ |
1731 * +---------------------------------+-----+------------------------+
1733 * Expect to allocate a memory region at the end of the final node in
1739 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_top_down_numa_no_overlap_high_check()
1740 struct memblock_region *min_node = &memblock.memory.regions[2]; in alloc_try_nid_top_down_numa_no_overlap_high_check()
1741 struct memblock_region *max_node = &memblock.memory.regions[5]; in alloc_try_nid_top_down_numa_no_overlap_high_check()
1750 min_addr = min_node->base; in alloc_try_nid_top_down_numa_no_overlap_high_check()
1759 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_top_down_numa_no_overlap_high_check()
1760 ASSERT_EQ(new_rgn->base, max_addr - size); in alloc_try_nid_top_down_numa_no_overlap_high_check()
1761 ASSERT_LE(max_node->base, new_rgn->base); in alloc_try_nid_top_down_numa_no_overlap_high_check()
1763 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_top_down_numa_no_overlap_high_check()
1764 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_top_down_numa_no_overlap_high_check()
1772 * A test that tries to allocate a memory region in a specific NUMA node that
1773 * has enough memory to allocate a region of the requested size.
1779 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_numa_simple_check()
1780 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_bottom_up_numa_simple_check()
1789 ASSERT_LE(SZ_4, req_node->size); in alloc_try_nid_bottom_up_numa_simple_check()
1790 size = req_node->size / SZ_4; in alloc_try_nid_bottom_up_numa_simple_check()
1800 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_bottom_up_numa_simple_check()
1801 ASSERT_EQ(new_rgn->base, req_node->base); in alloc_try_nid_bottom_up_numa_simple_check()
1804 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_numa_simple_check()
1805 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_bottom_up_numa_simple_check()
1813 * A test that tries to allocate a memory region in a specific NUMA node that
1814 * does not have enough memory to allocate a region of the requested size:
1816 * |----------------------+-----+ |
1818 * +----------------------+-----+----------------+
1820 * |---------+ |
1822 * +---------+-----------------------------------+
1825 * has enough memory (in this case, nid = 0) after falling back to NUMA_NO_NODE.
1831 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_numa_small_node_check()
1832 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_bottom_up_numa_small_node_check()
1833 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_try_nid_bottom_up_numa_small_node_check()
1842 size = SZ_2 * req_node->size; in alloc_try_nid_bottom_up_numa_small_node_check()
1852 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_bottom_up_numa_small_node_check()
1853 ASSERT_EQ(new_rgn->base, exp_node->base); in alloc_try_nid_bottom_up_numa_small_node_check()
1856 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_numa_small_node_check()
1857 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_bottom_up_numa_small_node_check()
1865 * A test that tries to allocate a memory region in a specific NUMA node that
1866 * is fully reserved:
1868 * |----------------------+ +-----------+ |
1870 * +----------------------+-----+-----------+--------------------+
1872 * |-----------+ +-----------+ |
1873 * | new | | reserved | |
1874 * +-----------+----------------+-----------+--------------------+
1877 * is large enough and has enough unreserved memory (in this case, nid = 0)
1885 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_numa_node_reserved_check()
1886 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_bottom_up_numa_node_reserved_check()
1887 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_try_nid_bottom_up_numa_node_reserved_check()
1896 size = req_node->size; in alloc_try_nid_bottom_up_numa_node_reserved_check()
1900 memblock_reserve(req_node->base, req_node->size); in alloc_try_nid_bottom_up_numa_node_reserved_check()
1907 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_bottom_up_numa_node_reserved_check()
1908 ASSERT_EQ(new_rgn->base, exp_node->base); in alloc_try_nid_bottom_up_numa_node_reserved_check()
1911 ASSERT_EQ(memblock.reserved.cnt, 2); in alloc_try_nid_bottom_up_numa_node_reserved_check()
1912 ASSERT_EQ(memblock.reserved.total_size, size + req_node->size); in alloc_try_nid_bottom_up_numa_node_reserved_check()
1920 * A test that tries to allocate a memory region in a specific NUMA node that
1921 * is partially reserved but has enough memory for the allocated region:
1923 * | +---------------------------------------+ |
1925 * +-----------+---------------------------------------+---------+
1927 * | +------------------+-----+ |
1928 * | | reserved | new | |
1929 * +-----------+------------------+-----+------------------------+
1932 * the existing reserved region. The total size gets updated.
1937 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_numa_part_reserved_check()
1938 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_bottom_up_numa_part_reserved_check()
1949 ASSERT_LE(SZ_8, req_node->size); in alloc_try_nid_bottom_up_numa_part_reserved_check()
1950 r1.base = req_node->base; in alloc_try_nid_bottom_up_numa_part_reserved_check()
1951 r1.size = req_node->size / SZ_2; in alloc_try_nid_bottom_up_numa_part_reserved_check()
1964 ASSERT_EQ(new_rgn->size, total_size); in alloc_try_nid_bottom_up_numa_part_reserved_check()
1965 ASSERT_EQ(new_rgn->base, req_node->base); in alloc_try_nid_bottom_up_numa_part_reserved_check()
1968 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_numa_part_reserved_check()
1969 ASSERT_EQ(memblock.reserved.total_size, total_size); in alloc_try_nid_bottom_up_numa_part_reserved_check()
1977 * A test that tries to allocate a memory region in a specific NUMA node that
1978 * is partially reserved and does not have enough contiguous memory for the
1981 * |----------------------+ +-----------------------+ |
1983 * +----------------------+-------+-----------------------+---------+
1985 * |-----------+ +----------+ |
1986 * | new | | reserved | |
1987 * +-----------+------------------------+----------+----------------+
1990 * node that is large enough and has enough unreserved memory (in this case,
1998 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_numa_part_reserved_fallback_check()
1999 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_bottom_up_numa_part_reserved_fallback_check()
2000 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_try_nid_bottom_up_numa_part_reserved_fallback_check()
2010 ASSERT_LE(SZ_4, req_node->size); in alloc_try_nid_bottom_up_numa_part_reserved_fallback_check()
2011 size = req_node->size / SZ_2; in alloc_try_nid_bottom_up_numa_part_reserved_fallback_check()
2012 r1.base = req_node->base + (size / SZ_2); in alloc_try_nid_bottom_up_numa_part_reserved_fallback_check()
2025 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_bottom_up_numa_part_reserved_fallback_check()
2026 ASSERT_EQ(new_rgn->base, exp_node->base); in alloc_try_nid_bottom_up_numa_part_reserved_fallback_check()
2029 ASSERT_EQ(memblock.reserved.cnt, 2); in alloc_try_nid_bottom_up_numa_part_reserved_fallback_check()
2030 ASSERT_EQ(memblock.reserved.total_size, size + r1.size); in alloc_try_nid_bottom_up_numa_part_reserved_fallback_check()
2038 * A test that tries to allocate a memory region that spans over the min_addr
2046 * | +-----------------------+-----------+ |
2048 * +-----------+-----------------------+-----------+--------------+
2050 * | +-----------+ |
2052 * +-----------+-----------+--------------------------------------+
2054 * Expect to drop the lower limit and allocate a memory region at the beginning
2060 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_numa_split_range_low_check()
2061 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_bottom_up_numa_split_range_low_check()
2072 min_addr = req_node_end - SZ_256; in alloc_try_nid_bottom_up_numa_split_range_low_check()
2081 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_bottom_up_numa_split_range_low_check()
2082 ASSERT_EQ(new_rgn->base, req_node->base); in alloc_try_nid_bottom_up_numa_split_range_low_check()
2085 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_numa_split_range_low_check()
2086 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_bottom_up_numa_split_range_low_check()
2094 * A test that tries to allocate a memory region that spans over the min_addr
2102 * |------------------+ +----------------------+---------+ |
2104 * +------------------+--------+----------------------+---------+------+
2106 * |---------+ |
2108 * +---------+---------------------------------------------------------+
2110 * Expect to drop the lower limit and allocate a memory region at the beginning
2111 * of the first node that has enough memory.
2117 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_numa_split_range_high_check()
2118 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_bottom_up_numa_split_range_high_check()
2119 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_try_nid_bottom_up_numa_split_range_high_check()
2130 min_addr = req_node->base - SZ_256; in alloc_try_nid_bottom_up_numa_split_range_high_check()
2139 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_bottom_up_numa_split_range_high_check()
2140 ASSERT_EQ(new_rgn->base, exp_node->base); in alloc_try_nid_bottom_up_numa_split_range_high_check()
2143 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_numa_split_range_high_check()
2144 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_bottom_up_numa_split_range_high_check()
2152 * A test that tries to allocate a memory region that spans over the min_addr
2160 * | +---------------+ +-------------+---------+ |
2162 * +----+---------------+--------+-------------+---------+---------+
2164 * | +---------+ |
2166 * +----+---------+------------------------------------------------+
2168 * Expect to drop the lower limit and allocate a memory region that starts at
2174 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_numa_no_overlap_split_check()
2175 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_bottom_up_numa_no_overlap_split_check()
2176 struct memblock_region *node2 = &memblock.memory.regions[6]; in alloc_try_nid_bottom_up_numa_no_overlap_split_check()
2186 min_addr = node2->base - SZ_256; in alloc_try_nid_bottom_up_numa_no_overlap_split_check()
2195 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_bottom_up_numa_no_overlap_split_check()
2196 ASSERT_EQ(new_rgn->base, req_node->base); in alloc_try_nid_bottom_up_numa_no_overlap_split_check()
2199 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_numa_no_overlap_split_check()
2200 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_bottom_up_numa_no_overlap_split_check()
2208 * A test that tries to allocate memory within min_addr and max_addr range when
2217 * |-----------+ +----------+----...----+----------+ |
2219 * +-----------+-----------+----------+----...----+----------+------+
2221 * | +-----+ |
2223 * +-----------------------+-----+----------------------------------+
2225 * Expect to allocate a memory region at the beginning of the first node
2231 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_numa_no_overlap_low_check()
2232 struct memblock_region *min_node = &memblock.memory.regions[2]; in alloc_try_nid_bottom_up_numa_no_overlap_low_check()
2233 struct memblock_region *max_node = &memblock.memory.regions[5]; in alloc_try_nid_bottom_up_numa_no_overlap_low_check()
2242 min_addr = min_node->base; in alloc_try_nid_bottom_up_numa_no_overlap_low_check()
2251 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_bottom_up_numa_no_overlap_low_check()
2252 ASSERT_EQ(new_rgn->base, min_addr); in alloc_try_nid_bottom_up_numa_no_overlap_low_check()
2255 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_numa_no_overlap_low_check()
2256 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_bottom_up_numa_no_overlap_low_check()
2264 * A test that tries to allocate memory within min_addr and max_addr range when
2273 * | +----------+----...----+----------+ +---------+ |
2275 * +-----+----------+----...----+----------+---------+---------+---+
2277 * | +-----+ |
2279 * +-----+-----+---------------------------------------------------+
2281 * Expect to allocate a memory region at the beginning of the first node
2287 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_bottom_up_numa_no_overlap_high_check()
2288 struct memblock_region *min_node = &memblock.memory.regions[2]; in alloc_try_nid_bottom_up_numa_no_overlap_high_check()
2289 struct memblock_region *max_node = &memblock.memory.regions[5]; in alloc_try_nid_bottom_up_numa_no_overlap_high_check()
2298 min_addr = min_node->base; in alloc_try_nid_bottom_up_numa_no_overlap_high_check()
2307 ASSERT_EQ(new_rgn->size, size); in alloc_try_nid_bottom_up_numa_no_overlap_high_check()
2308 ASSERT_EQ(new_rgn->base, min_addr); in alloc_try_nid_bottom_up_numa_no_overlap_high_check()
2311 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_bottom_up_numa_no_overlap_high_check()
2312 ASSERT_EQ(memblock.reserved.total_size, size); in alloc_try_nid_bottom_up_numa_no_overlap_high_check()
2320 * A test that tries to allocate a memory region in a specific NUMA node that
2321 * does not have enough memory to allocate a region of the requested size.
2322 * Additionally, none of the nodes have enough memory to allocate the region:
2324 * +-----------------------------------+
2326 * +-----------------------------------+
2327 * |-------+-------+-------+-------+-------+-------+-------+-------|
2329 * +-------+-------+-------+-------+-------+-------+-------+-------+
2357 * A test that tries to allocate memory within min_addr and max_addr range when
2358 * there are two reserved regions at the borders. The requested node starts at
2366 * | +-----------+-----------------------+-----------------------|
2368 * +------+-----------+-----------------------+-----------------------+
2370 * | +----+-----------------------+----+ |
2372 * +-------------+----+-----------------------+----+------------------+
2381 struct memblock_region *new_rgn = &memblock.reserved.regions[0]; in alloc_try_nid_numa_reserved_full_merge_generic_check()
2382 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_try_nid_numa_reserved_full_merge_generic_check()
2383 struct memblock_region *next_node = &memblock.memory.regions[nid_next]; in alloc_try_nid_numa_reserved_full_merge_generic_check()
2386 phys_addr_t size = req_node->size; in alloc_try_nid_numa_reserved_full_merge_generic_check()
2394 r1.base = next_node->base; in alloc_try_nid_numa_reserved_full_merge_generic_check()
2398 r2.base = r1.base - (size + r2.size); in alloc_try_nid_numa_reserved_full_merge_generic_check()
2413 ASSERT_EQ(new_rgn->size, total_size); in alloc_try_nid_numa_reserved_full_merge_generic_check()
2414 ASSERT_EQ(new_rgn->base, r2.base); in alloc_try_nid_numa_reserved_full_merge_generic_check()
2416 ASSERT_LE(new_rgn->base, req_node->base); in alloc_try_nid_numa_reserved_full_merge_generic_check()
2419 ASSERT_EQ(memblock.reserved.cnt, 1); in alloc_try_nid_numa_reserved_full_merge_generic_check()
2420 ASSERT_EQ(memblock.reserved.total_size, total_size); in alloc_try_nid_numa_reserved_full_merge_generic_check()
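/*
 * The accounting above follows from a three-way merge (sketch; the
 * elided total_size definition is assumed): the new region of
 * req_node->size bytes lands exactly between r2 and r1, so all three
 * collapse into one reserved region based at r2.base:
 *
 *	total_size = r1.size + r2.size + size;
 */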
2428 * A test that tries to allocate memory within min_addr and max_addr range,
2430 * and everything else is reserved. Additionally, nid is set to NUMA_NO_NODE
2433 * +-----------+
2435 * +-----------+
2436 * | +---------------------+-----------|
2438 * +------+---------------------+-----------+
2440 * |----------------------+ +-----|
2442 * +----------------------+-----------+-----+
2454 struct memblock_region *next_node = &memblock.memory.regions[7]; in alloc_try_nid_numa_split_all_reserved_generic_check()
2463 r2.base = next_node->base + SZ_128; in alloc_try_nid_numa_split_all_reserved_generic_check()
2464 r2.size = memblock_end_of_DRAM() - r2.base; in alloc_try_nid_numa_split_all_reserved_generic_check()
2466 r1.size = MEM_SIZE - (r2.size + size); in alloc_try_nid_numa_split_all_reserved_generic_check()