
// SPDX-License-Identifier: GPL-2.0-or-later
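
/*
 * The NUMA checks below use a region_end() helper; a plausible
 * reconstruction (assumption: it simply returns the first address past
 * a region, matching how the assertions use it):
 */
static inline phys_addr_t region_end(struct memblock_region *rgn)
{
	return rgn->base + rgn->size;
}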

/*
 * A simple test that tries to allocate a memory region within min_addr and
 * max_addr range:
 *
 *  |    +       +-----------+      |
 *  +----+-------+-----------+------+
 *
 * Expect to allocate a region that ends at max_addr.
 */
	/* in alloc_try_nid_top_down_simple_check(): */
	rgn_end = rgn->base + rgn->size;

	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, max_addr - size);
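
/*
 * A minimal sketch of how this check is driven, assuming the memblock
 * test-suite helpers (setup_memblock(), ASSERT_*()) and illustrative
 * sizes; memblock_alloc_try_nid() is the kernel API under test.
 */
static int alloc_try_nid_top_down_simple_sketch(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t size = SZ_128;
	phys_addr_t min_addr, max_addr, rgn_end;

	setup_memblock();

	min_addr = memblock_start_of_DRAM() + SMP_CACHE_BYTES * 2;
	max_addr = min_addr + SZ_512;

	/* top-down policy places the region as high as possible */
	allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr,
					       NUMA_NO_NODE);
	rgn_end = rgn->base + rgn->size;

	ASSERT_NE(allocated_ptr, NULL);
	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, max_addr - size);
	ASSERT_EQ(rgn_end, max_addr);

	return 0;
}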

/*
 * A simple test that tries to allocate a memory region within min_addr and
 * max_addr range, where the end address is misaligned:
 *
 *  |      +       +---------+  +    |
 *  +------+-------+---------+--+----+
 *
 * Expect to allocate an aligned region that ends before max_addr.
 */
	/* in alloc_try_nid_top_down_end_misaligned_check(): */
	rgn_end = rgn->base + rgn->size;

	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, max_addr - size - misalign);
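
/*
 * Sketch of the distinctive setup for the misaligned case (misalign and
 * the range width are assumed values): max_addr sits SZ_2 past an
 * SMP_CACHE_BYTES boundary, so a cache-aligned region must stop
 * misalign bytes short of it.
 */
static int alloc_try_nid_end_misaligned_sketch(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t size = SZ_128;
	phys_addr_t misalign = SZ_2;
	phys_addr_t min_addr, max_addr;

	setup_memblock();

	min_addr = memblock_start_of_DRAM();
	max_addr = min_addr + SZ_512 + misalign;	/* misaligned end */

	allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr,
					       NUMA_NO_NODE);

	ASSERT_NE(allocated_ptr, NULL);
	ASSERT_EQ(rgn->base, max_addr - size - misalign);

	return 0;
}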

/*
 * A simple test that tries to allocate a memory region, which spans over the
 * min_addr and max_addr range:
 *
 *  |      +---------------+      |
 *  +------+---------------+------+
 *
 * Expect to allocate a region that starts at min_addr and ends at
 * max_addr.
 */
	/* in alloc_try_nid_exact_address_generic_check(): */
	rgn_end = rgn->base + rgn->size;

	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, min_addr);
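
/*
 * Sketch (sizes assumed): the window is exactly as wide as the request,
 * so any placement policy must return the region [min_addr, max_addr].
 */
static int alloc_try_nid_exact_address_sketch(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t size = SZ_1K;
	phys_addr_t min_addr, max_addr;

	setup_memblock();

	min_addr = memblock_start_of_DRAM() + SMP_CACHE_BYTES;
	max_addr = min_addr + size;	/* window == size */

	allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr,
					       NUMA_NO_NODE);

	ASSERT_NE(allocated_ptr, NULL);
	ASSERT_EQ(rgn->base, min_addr);
	ASSERT_EQ(rgn->base + rgn->size, max_addr);

	return 0;
}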

/*
 * A test that tries to allocate a memory region, which can't fit into
 * min_addr and max_addr range:
 *
 *  |        +----------+-----+    |
 *  +--------+----------+-----+----+
 *
 * Expect to drop the lower limit and allocate a memory region which
 * ends at max_addr (if the address is aligned).
 */
	/* in alloc_try_nid_top_down_narrow_range_check(): */
	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, max_addr - size);
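
/*
 * Sketch (sizes assumed): the request is larger than the window, so the
 * allocator is expected to relax min_addr rather than fail; top-down
 * placement keeps the region bounded by max_addr from above.
 */
static int alloc_try_nid_narrow_range_sketch(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t size = SZ_256;
	phys_addr_t min_addr, max_addr;

	setup_memblock();

	min_addr = memblock_start_of_DRAM() + SZ_512;
	max_addr = min_addr + SMP_CACHE_BYTES;	/* window < size */

	allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr,
					       NUMA_NO_NODE);

	ASSERT_NE(allocated_ptr, NULL);
	ASSERT_EQ(rgn->base, max_addr - size);

	return 0;
}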

/*
 * A test that tries to allocate a memory region, which can't fit into
 * min_addr and max_addr range, with the latter being too close to the
 * beginning of the available memory:
 *
 *          +-------------+
 *          |     new     |
 *          +-------------+
 *
 *  +-------+--------------+
 *
 * Expect no allocation to happen.
 */
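
/*
 * Sketch (sizes assumed): here even dropping the lower limit cannot
 * help, because max_addr itself is too close to the beginning of the
 * available memory, so the call is expected to fail cleanly.
 */
static int alloc_try_nid_low_max_sketch(void)
{
	void *allocated_ptr = NULL;
	phys_addr_t size = SZ_1K;
	phys_addr_t min_addr, max_addr;

	setup_memblock();

	min_addr = memblock_start_of_DRAM();
	max_addr = min_addr + SMP_CACHE_BYTES;	/* too low for size */

	allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr,
					       NUMA_NO_NODE);

	ASSERT_EQ(allocated_ptr, NULL);
	ASSERT_EQ(memblock.reserved.total_size, 0);

	return 0;
}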

/*
 * A test that tries to allocate a memory region within min_addr and max_addr
 * range, with min_addr being so close that it's next to an allocated region:
 *
 *  |       +--------+---------------|
 *  +-------+--------+---------------+
 *
 * Expect a merge of both regions. Only the region size gets updated.
 */
	/* in alloc_try_nid_min_reserved_generic_check(): */
	min_addr = max_addr - r2_size;
	reserved_base = min_addr - r1_size;

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, reserved_base);
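
/*
 * Sketch (sizes assumed): reserve r1 right below min_addr, then allocate
 * r2 at min_addr; memblock should merge the two into one reserved region
 * of r1_size + r2_size starting at the reserved base.
 */
static int alloc_try_nid_min_reserved_sketch(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t r1_size = SZ_128;
	phys_addr_t r2_size = SZ_64;
	phys_addr_t total_size = r1_size + r2_size;
	phys_addr_t min_addr, max_addr, reserved_base;

	setup_memblock();

	max_addr = memblock_end_of_DRAM();
	min_addr = max_addr - r2_size;
	reserved_base = min_addr - r1_size;

	memblock_reserve(reserved_base, r1_size);

	allocated_ptr = memblock_alloc_try_nid(r2_size, SMP_CACHE_BYTES,
					       min_addr, max_addr,
					       NUMA_NO_NODE);

	ASSERT_NE(allocated_ptr, NULL);
	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, reserved_base);
	ASSERT_EQ(memblock.reserved.cnt, 1);	/* merged, not appended */

	return 0;
}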

/*
 * A test that tries to allocate a memory region within min_addr and max_addr,
 * with max_addr being so close that it's next to an allocated region:
 *
 *  |          +-------------+--------|
 *  +----------+-------------+--------+
 *
 * Expect a merge of regions. Only the region size gets updated.
 */
	/* in alloc_try_nid_max_reserved_generic_check(): */
	max_addr = memblock_end_of_DRAM() - r1_size;
	min_addr = max_addr - r2_size;

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, min_addr);

/*
 * A test that tries to allocate memory within min_addr and max_addr range,
 * when there are two reserved regions at the borders, with a gap big enough
 * to fit a new region:
 *
 *  |    +--------+   +-------+------+  |
 *  +----+--------+---+-------+------+--+
 *
 * Expect to merge the new region with r1. The second region does not get
 * updated. The total size gets updated.
 */
	/* in alloc_try_nid_top_down_reserved_with_space_check(): */
	struct region r1, r2;

	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
	r2.base = r1.base - (r3_size + gap_size + r2.size);

	ASSERT_EQ(rgn1->size, r1.size + r3_size);
	ASSERT_EQ(rgn1->base, max_addr - r3_size);

	ASSERT_EQ(rgn2->size, r2.size);
	ASSERT_EQ(rgn2->base, r2.base);
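
/*
 * Sketch (sizes assumed): r1 and r2 are reserved on either side of a gap
 * just big enough for r3; a top-down allocation lands right below r1 and
 * merges with it, while r2 is left untouched. Note that reserved regions
 * are kept sorted by base, so r2 is regions[0] and r1 is regions[1].
 */
static int alloc_try_nid_reserved_with_space_sketch(void)
{
	struct memblock_region *rgn1 = &memblock.reserved.regions[1];
	struct memblock_region *rgn2 = &memblock.reserved.regions[0];
	struct region r1, r2;
	void *allocated_ptr = NULL;
	phys_addr_t r3_size = SZ_64;
	phys_addr_t gap_size = SMP_CACHE_BYTES;
	phys_addr_t min_addr, max_addr;

	setup_memblock();

	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
	r1.size = SMP_CACHE_BYTES;

	r2.size = SZ_128;
	r2.base = r1.base - (r3_size + gap_size + r2.size);

	min_addr = r2.base + r2.size;
	max_addr = r1.base;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = memblock_alloc_try_nid(r3_size, SMP_CACHE_BYTES,
					       min_addr, max_addr,
					       NUMA_NO_NODE);

	ASSERT_NE(allocated_ptr, NULL);
	ASSERT_EQ(rgn1->size, r1.size + r3_size);	/* r3 merged into r1 */
	ASSERT_EQ(rgn2->size, r2.size);			/* r2 untouched */

	return 0;
}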

/*
 * A test that tries to allocate memory within min_addr and max_addr range,
 * when there are two reserved regions at the borders, with a gap of a size
 * equal to the size of the new region:
 *
 *  |     +--------+--------+--------+     |
 *  +-----+--------+--------+--------+-----+
 *
 * Expect to merge all of the regions into one. The region counter and total
 * size fields get updated.
 */
	/* in alloc_try_nid_reserved_full_merge_generic_check(): */
	struct region r1, r2;

	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
	r2.base = r1.base - (r3_size + r2.size);

	ASSERT_EQ(rgn->size, total_size);
	ASSERT_EQ(rgn->base, r2.base);

/*
 * A test that tries to allocate memory within min_addr and max_addr range,
 * when there are two reserved regions at the borders, with a gap that can't
 * fit a new region:
 *
 *  |  +----------+------+    +------+   |
 *  +--+----------+------+----+------+---+
 *
 * Expect to merge the new region with r2. The second region does not get
 * updated. The total size counter gets updated.
 */
	/* in alloc_try_nid_top_down_reserved_no_space_check(): */
	struct region r1, r2;

	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
	r2.base = r1.base - (r2.size + gap_size);

	ASSERT_EQ(rgn1->size, r1.size);
	ASSERT_EQ(rgn1->base, r1.base);

	ASSERT_EQ(rgn2->size, r2.size + r3_size);
	ASSERT_EQ(rgn2->base, r2.base - r3_size);

/*
 * A test that tries to allocate memory within min_addr and max_addr range,
 * but it's too narrow and everything else is reserved:
 *
 *            +-----------+
 *            |    new    |
 *            +-----------+
 *  |--------------+      +----------|
 *  +--------------+------+----------+
 *
 * Expect no allocation to happen.
 */
	/* in alloc_try_nid_reserved_all_generic_check(): */
	struct region r1, r2;

	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES;
	r2.size = MEM_SIZE - (r1.size + gap_size);
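
/*
 * Sketch (sizes assumed): reserve everything except a gap smaller than
 * the request, then try to allocate; the call is expected to fail.
 */
static int alloc_try_nid_reserved_all_sketch(void)
{
	struct region r1, r2;
	void *allocated_ptr = NULL;
	phys_addr_t gap_size = SMP_CACHE_BYTES;
	phys_addr_t size = gap_size + SZ_2;	/* strictly larger than the gap */

	setup_memblock();

	/* r1 covers the top of DRAM, r2 everything below the gap */
	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES;
	r1.size = SMP_CACHE_BYTES;
	r2.base = memblock_start_of_DRAM();
	r2.size = MEM_SIZE - (r1.size + gap_size);

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
					       memblock_start_of_DRAM(),
					       memblock_end_of_DRAM(),
					       NUMA_NO_NODE);

	ASSERT_EQ(allocated_ptr, NULL);

	return 0;
}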

/*
 * A test that tries to allocate a memory region, where max_addr is
 * bigger than the end address of the available memory. Expect to allocate
 * a region that ends at the end of the memory.
 */
	/* in alloc_try_nid_top_down_cap_max_check(): */
	min_addr = memblock_end_of_DRAM() - SZ_1K;

	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - size);

/*
 * A test that tries to allocate a memory region, where min_addr is
 * smaller than the start address of the available memory. Expect to allocate
 * a region that ends at the end of the memory.
 */
	/* in alloc_try_nid_top_down_cap_min_check(): */
	min_addr = memblock_start_of_DRAM() - SZ_256;

	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - size);
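
/*
 * Sketch (assumed values): min_addr is deliberately placed below the
 * first valid address; the allocator is expected to cap it to the start
 * of DRAM instead of failing, and the top-down policy still places the
 * region at the top.
 */
static int alloc_try_nid_cap_min_sketch(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t size = SZ_1K;
	phys_addr_t min_addr, max_addr;

	setup_memblock();

	min_addr = memblock_start_of_DRAM() - SZ_256;	/* out of range */
	max_addr = memblock_end_of_DRAM();

	allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr,
					       NUMA_NO_NODE);

	ASSERT_NE(allocated_ptr, NULL);
	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - size);

	return 0;
}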

/*
 * A simple test that tries to allocate a memory region within min_addr and
 * max_addr range:
 *
 *  |    +-----------+           |      |
 *  +----+-----------+-----------+------+
 *
 * Expect to allocate a region that ends before max_addr.
 */
	/* in alloc_try_nid_bottom_up_simple_check(): */
	rgn_end = rgn->base + rgn->size;

	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, min_addr);

/*
 * A simple test that tries to allocate a memory region within min_addr and
 * max_addr range, where the start address is misaligned:
 *
 *  |     +   +-----------+     +     |
 *  +-----+---+-----------+-----+-----+
 *        ^   ^----.            ^
 *
 * Expect to allocate an aligned region that ends before max_addr.
 */
	/* in alloc_try_nid_bottom_up_start_misaligned_check(): */
	rgn_end = rgn->base + rgn->size;

	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, min_addr + (SMP_CACHE_BYTES - misalign));
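
/*
 * Sketch (assumed values): min_addr sits misalign bytes past an
 * SMP_CACHE_BYTES boundary, so a bottom-up allocation is expected to
 * round up to the next boundary, i.e. to
 * min_addr + (SMP_CACHE_BYTES - misalign).
 */
static int alloc_try_nid_start_misaligned_sketch(void)
{
	struct memblock_region *rgn = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	phys_addr_t size = SZ_128;
	phys_addr_t misalign = SZ_2;
	phys_addr_t min_addr, max_addr;

	setup_memblock();
	memblock_set_bottom_up(true);	/* bottom-up allocation policy */

	min_addr = memblock_start_of_DRAM() + misalign;
	max_addr = min_addr + SZ_512;

	allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr,
					       NUMA_NO_NODE);

	ASSERT_NE(allocated_ptr, NULL);
	ASSERT_EQ(rgn->base, min_addr + (SMP_CACHE_BYTES - misalign));

	return 0;
}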

/*
 * A test that tries to allocate a memory region, which can't fit into min_addr
 * and max_addr range:
 *
 *  |---------+         +    +      |
 *  +---------+---------+----+------+
 *
 * Expect to drop the lower limit and allocate a memory region which
 * starts at the beginning of the available memory.
 */
	/* in alloc_try_nid_bottom_up_narrow_range_check(): */
	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, memblock_start_of_DRAM());

/*
 * A test that tries to allocate memory within min_addr and max_addr range,
 * when there are two reserved regions at the borders, with a gap big enough
 * to fit a new region:
 *
 *  |    +--------+-------+   +------+  |
 *  +----+--------+-------+---+------+--+
 *
 * Expect to merge the new region with r2. The second region does not get
 * updated. The total size counter gets updated.
 */
	/* in alloc_try_nid_bottom_up_reserved_with_space_check(): */
	struct region r1, r2;

	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
	r2.base = r1.base - (r3_size + gap_size + r2.size);

	ASSERT_EQ(rgn1->size, r1.size);
	ASSERT_EQ(rgn1->base, max_addr);

	ASSERT_EQ(rgn2->size, r2.size + r3_size);
	ASSERT_EQ(rgn2->base, r2.base);

/*
 * A test that tries to allocate memory within min_addr and max_addr range,
 * when there are two reserved regions at the borders, with a gap smaller than
 * the size of the new region:
 *
 *  |----------+    +------+   +----+  |
 *  +----------+----+------+---+----+--+
 *
 * Expect to drop the lower limit and allocate memory at the beginning of the
 * available memory. The region counter and total size fields get updated.
 */
	/* in alloc_try_nid_bottom_up_reserved_no_space_check(): */
	struct region r1, r2;

	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
	r2.base = r1.base - (r2.size + gap_size);

	ASSERT_EQ(rgn3->size, r3_size);
	ASSERT_EQ(rgn3->base, memblock_start_of_DRAM());

	ASSERT_EQ(rgn2->size, r2.size);
	ASSERT_EQ(rgn2->base, r2.base);

	ASSERT_EQ(rgn1->size, r1.size);
	ASSERT_EQ(rgn1->base, r1.base);

/*
 * A test that tries to allocate a memory region, where max_addr is
 * bigger than the end address of the available memory. Expect to allocate
 * a region that starts at min_addr.
 */
	/* in alloc_try_nid_bottom_up_cap_max_check(): */
	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, min_addr);

/*
 * A test that tries to allocate a memory region, where min_addr is
 * smaller than the start address of the available memory. Expect to allocate
 * a region at the beginning of the available memory.
 */
	/* in alloc_try_nid_bottom_up_cap_min_check(): */
	max_addr = memblock_end_of_DRAM() - SZ_256;

	ASSERT_EQ(rgn->size, size);
	ASSERT_EQ(rgn->base, memblock_start_of_DRAM());

/*
 * A test that tries to allocate a memory region in a specific NUMA node that
 * has enough memory to allocate a region of the requested size.
 * Expect to allocate an aligned region at the end of the requested node.
 */
	/* in alloc_try_nid_top_down_numa_simple_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];

	ASSERT_LE(SZ_4, req_node->size);
	size = req_node->size / SZ_4;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, region_end(req_node) - size);
	ASSERT_LE(req_node->base, new_rgn->base);
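
/*
 * Sketch of a NUMA-aware check (assumptions: setup_numa_memblock() and
 * the node_fractions table come from the test harness; nid_req = 3 is an
 * arbitrary example node). The allocation is constrained to one node by
 * passing a real node id instead of NUMA_NO_NODE.
 */
static int alloc_try_nid_numa_simple_sketch(void)
{
	int nid_req = 3;
	struct memblock_region *new_rgn = &memblock.reserved.regions[0];
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	void *allocated_ptr = NULL;
	phys_addr_t size;
	phys_addr_t min_addr = memblock_start_of_DRAM();
	phys_addr_t max_addr = memblock_end_of_DRAM();

	setup_numa_memblock(node_fractions);
	size = req_node->size / SZ_4;	/* request a quarter of the node */

	allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
					       min_addr, max_addr, nid_req);

	ASSERT_NE(allocated_ptr, NULL);
	/* top-down: the region hugs the end of the requested node */
	ASSERT_EQ(new_rgn->base, region_end(req_node) - size);

	return 0;
}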

/*
 * A test that tries to allocate a memory region in a specific NUMA node that
 * does not have enough memory to allocate a region of the requested size:
 *
 *  |   +-----+          +------------------+     |
 *  +---+-----+----------+------------------+-----+
 *
 *  |                             +---------+     |
 *  +-----------------------------+---------+-----+
 *
 * Expect to allocate an aligned region at the end of the last node that has
 * enough memory (in this case, nid = 6) after falling back to NUMA_NO_NODE.
 */
	/* in alloc_try_nid_top_down_numa_small_node_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];

	size = SZ_2 * req_node->size;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, region_end(exp_node) - size);
	ASSERT_LE(exp_node->base, new_rgn->base);

/*
 * A test that tries to allocate a memory region in a specific NUMA node that
 * is fully reserved:
 *
 *  |              +---------+            +------------------+     |
 *  +--------------+---------+------------+------------------+-----+
 *
 *  |              +---------+                     +---------+     |
 *  +--------------+---------+---------------------+---------+-----+
 *
 * Expect to allocate an aligned region at the end of the last node that is
 * large enough and has enough unreserved memory (in this case, nid = 6) after
 * falling back to NUMA_NO_NODE. The region count and total size get updated.
 */
	/* in alloc_try_nid_top_down_numa_node_reserved_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];

	size = req_node->size;
	memblock_reserve(req_node->base, req_node->size);

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, region_end(exp_node) - size);
	ASSERT_LE(exp_node->base, new_rgn->base);

	ASSERT_EQ(memblock.reserved.total_size, size + req_node->size);
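
/*
 * Sketch (same harness assumptions as the previous sketch; nid_req = 2
 * is arbitrary): fully reserving the requested node before allocating
 * forces the NUMA_NO_NODE fallback path, and the reserved counters then
 * account for both the pre-reservation and the new region.
 */
static int alloc_try_nid_numa_node_reserved_sketch(void)
{
	int nid_req = 2;
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	void *allocated_ptr = NULL;

	setup_numa_memblock(node_fractions);
	memblock_reserve(req_node->base, req_node->size);

	allocated_ptr = memblock_alloc_try_nid(req_node->size,
					       SMP_CACHE_BYTES,
					       memblock_start_of_DRAM(),
					       memblock_end_of_DRAM(),
					       nid_req);

	ASSERT_NE(allocated_ptr, NULL);
	/* the reservation and the fallback allocation are both accounted */
	ASSERT_EQ(memblock.reserved.total_size, 2 * req_node->size);

	return 0;
}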

/*
 * A test that tries to allocate a memory region in a specific NUMA node that
 * is partially reserved but has enough memory for the allocated region:
 *
 *  |           +---------------------------------------+          |
 *  +-----------+---------------------------------------+----------+
 *
 *  |           +------------------+              +-----+          |
 *  +-----------+------------------+--------------+-----+----------+
 *
 * Expect to allocate an aligned region at the end of the requested node. The
 * region count and total size get updated.
 */
	/* in alloc_try_nid_top_down_numa_part_reserved_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct region r1;

	ASSERT_LE(SZ_8, req_node->size);
	r1.base = req_node->base;
	r1.size = req_node->size / SZ_2;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, region_end(req_node) - size);
	ASSERT_LE(req_node->base, new_rgn->base);

/*
 * A test that tries to allocate a memory region in a specific NUMA node that
 * is partially reserved and does not have enough contiguous memory for the
 * allocated region:
 *
 *  |           +-----------------------+         +----------------------|
 *  +-----------+-----------------------+---------+----------------------+
 *
 *  |                 +----------+                           +-----------|
 *  +-----------------+----------+---------------------------+-----------+
 *
 * Expect to allocate an aligned region at the end of the last node that is
 * large enough and has enough unreserved memory (in this case,
 * nid = NUMA_NODES - 1) after falling back to NUMA_NO_NODE. The region count
 * and total size get updated.
 */
	/* in alloc_try_nid_top_down_numa_part_reserved_fallback_check(): */
	int nid_exp = NUMA_NODES - 1;
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];
	struct region r1;

	ASSERT_LE(SZ_4, req_node->size);
	size = req_node->size / SZ_2;
	r1.base = req_node->base + (size / SZ_2);

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, region_end(exp_node) - size);
	ASSERT_LE(exp_node->base, new_rgn->base);

/*
 * A test that tries to allocate a memory region that spans over the min_addr
 * and max_addr range and overlaps with two different nodes, where the first
 * node is the requested node:
 *
 *  |           +-----------------------+-----------+              |
 *  +-----------+-----------------------+-----------+--------------+
 *
 *  |                       +-----------+                          |
 *  +-----------------------+-----------+--------------------------+
 *
 * Expect to drop the lower limit and allocate a memory region that ends at
 * the end of the requested node.
 */
	/* in alloc_try_nid_top_down_numa_split_range_low_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];

	min_addr = req_node_end - SZ_256;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, req_node_end - size);
	ASSERT_LE(req_node->base, new_rgn->base);

/*
 * A test that tries to allocate a memory region that spans over the min_addr
 * and max_addr range and overlaps with two different nodes, where the second
 * node is the requested node:
 *
 *  |      +--------------------------+---------+                |
 *  +------+--------------------------+---------+----------------+
 *
 *  |                       +---------+                          |
 *  +-----------------------+---------+--------------------------+
 *
 * Expect to drop the lower limit and allocate a memory region that
 * ends at the end of the first node that overlaps with the range.
 */
	/* in alloc_try_nid_top_down_numa_split_range_high_check(): */
	int nid_exp = nid_req - 1;
	struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];

	min_addr = exp_node_end - SZ_256;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, exp_node_end - size);
	ASSERT_LE(exp_node->base, new_rgn->base);

/*
 * A test that tries to allocate a memory region that spans over the min_addr
 * and max_addr range and overlaps with two different nodes, where the
 * requested node ends before min_addr:
 *
 *  |    +---------------+        +-------------+---------+          |
 *  +----+---------------+--------+-------------+---------+----------+
 *
 *  |          +---------+                                           |
 *  +----------+---------+-------------------------------------------+
 *
 * Expect to drop the lower limit and allocate a memory region that ends at
 * the end of the requested node.
 */
	/* in alloc_try_nid_top_down_numa_no_overlap_split_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *node2 = &memblock.memory.regions[6];

	min_addr = node2->base - SZ_256;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, region_end(req_node) - size);
	ASSERT_LE(req_node->base, new_rgn->base);

/*
 * A test that tries to allocate memory within min_addr and max_addr range
 * when the requested node and the range don't overlap, and the requested
 * node ends before min_addr. The range overlaps with multiple nodes along
 * node boundaries:
 *
 *  |-----------+           +----------+----...----+----------+      |
 *  +-----------+-----------+----------+----...----+----------+------+
 *
 *  |                                                   +-----+      |
 *  +---------------------------------------------------+-----+------+
 *
 * Expect to allocate a memory region at the end of the final node in
 * the range after falling back to NUMA_NO_NODE.
 */
	/* in alloc_try_nid_top_down_numa_no_overlap_low_check(): */
	struct memblock_region *min_node = &memblock.memory.regions[2];
	struct memblock_region *max_node = &memblock.memory.regions[5];

	min_addr = min_node->base;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, max_addr - size);
	ASSERT_LE(max_node->base, new_rgn->base);

/*
 * A test that tries to allocate memory within min_addr and max_addr range
 * when the requested node and the range don't overlap, and the requested
 * node starts after max_addr. The range overlaps with multiple nodes along
 * node boundaries:
 *
 *  |     +----------+----...----+----------+        +-----------+   |
 *  +-----+----------+----...----+----------+--------+-----------+---+
 *
 *  |                                 +-----+                        |
 *  +---------------------------------+-----+------------------------+
 *
 * Expect to allocate a memory region at the end of the final node in
 * the range after falling back to NUMA_NO_NODE.
 */
	/* in alloc_try_nid_top_down_numa_no_overlap_high_check(): */
	struct memblock_region *min_node = &memblock.memory.regions[2];
	struct memblock_region *max_node = &memblock.memory.regions[5];

	min_addr = min_node->base;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, max_addr - size);
	ASSERT_LE(max_node->base, new_rgn->base);

/*
 * A test that tries to allocate a memory region in a specific NUMA node that
 * has enough memory to allocate a region of the requested size.
 * Expect to allocate an aligned region at the beginning of the requested node.
 */
	/* in alloc_try_nid_bottom_up_numa_simple_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];

	ASSERT_LE(SZ_4, req_node->size);
	size = req_node->size / SZ_4;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, req_node->base);

/*
 * A test that tries to allocate a memory region in a specific NUMA node that
 * does not have enough memory to allocate a region of the requested size:
 *
 *  |----------------------+-----+                |
 *  +----------------------+-----+----------------+
 *
 *  |---------+                                   |
 *  +---------+-----------------------------------+
 *
 * Expect to allocate an aligned region at the beginning of the first node that
 * has enough memory (in this case, nid = 0) after falling back to NUMA_NO_NODE.
 */
	/* in alloc_try_nid_bottom_up_numa_small_node_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];

	size = SZ_2 * req_node->size;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, exp_node->base);

/*
 * A test that tries to allocate a memory region in a specific NUMA node that
 * is fully reserved:
 *
 *  |----------------------+     +-----------+                    |
 *  +----------------------+-----+-----------+--------------------+
 *
 *  |-----------+                +-----------+                    |
 *  +-----------+----------------+-----------+--------------------+
 *
 * Expect to allocate an aligned region at the beginning of the first node that
 * is large enough and has enough unreserved memory (in this case, nid = 0)
 * after falling back to NUMA_NO_NODE. The region count and total size get
 * updated.
 */
	/* in alloc_try_nid_bottom_up_numa_node_reserved_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];

	size = req_node->size;
	memblock_reserve(req_node->base, req_node->size);

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, exp_node->base);

	ASSERT_EQ(memblock.reserved.total_size, size + req_node->size);

/*
 * A test that tries to allocate a memory region in a specific NUMA node that
 * is partially reserved but has enough memory for the allocated region:
 *
 *  |           +---------------------------------------+         |
 *  +-----------+---------------------------------------+---------+
 *
 *  |           +------------------+-----+                        |
 *  +-----------+------------------+-----+------------------------+
 *
 * Expect to allocate an aligned region in the requested node that merges with
 * the existing reserved region. The total size gets updated.
 */
	/* in alloc_try_nid_bottom_up_numa_part_reserved_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct region r1;

	ASSERT_LE(SZ_8, req_node->size);
	r1.base = req_node->base;
	r1.size = req_node->size / SZ_2;

	ASSERT_EQ(new_rgn->size, total_size);
	ASSERT_EQ(new_rgn->base, req_node->base);

/*
 * A test that tries to allocate a memory region in a specific NUMA node that
 * is partially reserved and does not have enough contiguous memory for the
 * allocated region:
 *
 *  |----------------------+       +-----------------------+         |
 *  +----------------------+-------+-----------------------+---------+
 *
 *  |-----------+                        +----------+                |
 *  +-----------+------------------------+----------+----------------+
 *
 * Expect to allocate an aligned region at the beginning of the first
 * node that is large enough and has enough unreserved memory (in this case,
 * nid = 0) after falling back to NUMA_NO_NODE. The region count and total size
 * get updated.
 */
	/* in alloc_try_nid_bottom_up_numa_part_reserved_fallback_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];
	struct region r1;

	ASSERT_LE(SZ_4, req_node->size);
	size = req_node->size / SZ_2;
	r1.base = req_node->base + (size / SZ_2);

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, exp_node->base);

/*
 * A test that tries to allocate a memory region that spans over the min_addr
 * and max_addr range and overlaps with two different nodes, where the first
 * node is the requested node:
 *
 *  |           +-----------------------+-----------+              |
 *  +-----------+-----------------------+-----------+--------------+
 *
 *  |           +-----------+                                      |
 *  +-----------+-----------+--------------------------------------+
 *
 * Expect to drop the lower limit and allocate a memory region at the beginning
 * of the requested node.
 */
	/* in alloc_try_nid_bottom_up_numa_split_range_low_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];

	min_addr = req_node_end - SZ_256;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, req_node->base);

/*
 * A test that tries to allocate a memory region that spans over the min_addr
 * and max_addr range and overlaps with two different nodes, where the second
 * node is the requested node:
 *
 *  |------------------+        +----------------------+---------+      |
 *  +------------------+--------+----------------------+---------+------+
 *
 *  |---------+                                                         |
 *  +---------+---------------------------------------------------------+
 *
 * Expect to drop the lower limit and allocate a memory region at the beginning
 * of the first node that has enough memory.
 */
	/* in alloc_try_nid_bottom_up_numa_split_range_high_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *exp_node = &memblock.memory.regions[nid_exp];

	min_addr = req_node->base - SZ_256;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, exp_node->base);

/*
 * A test that tries to allocate a memory region that spans over the min_addr
 * and max_addr range and overlaps with two different nodes, where the
 * requested node ends before min_addr:
 *
 *  |    +---------------+        +-------------+---------+         |
 *  +----+---------------+--------+-------------+---------+---------+
 *
 *  |    +---------+                                                |
 *  +----+---------+------------------------------------------------+
 *
 * Expect to drop the lower limit and allocate a memory region that starts at
 * the beginning of the requested node.
 */
	/* in alloc_try_nid_bottom_up_numa_no_overlap_split_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *node2 = &memblock.memory.regions[6];

	min_addr = node2->base - SZ_256;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, req_node->base);

/*
 * A test that tries to allocate memory within min_addr and max_addr range
 * when the requested node and the range don't overlap, and the requested
 * node ends before min_addr. The range overlaps with multiple nodes along
 * node boundaries:
 *
 *  |-----------+           +----------+----...----+----------+      |
 *  +-----------+-----------+----------+----...----+----------+------+
 *
 *  |                       +-----+                                  |
 *  +-----------------------+-----+----------------------------------+
 *
 * Expect to allocate a memory region at the beginning of the first node
 * in the range after falling back to NUMA_NO_NODE.
 */
	/* in alloc_try_nid_bottom_up_numa_no_overlap_low_check(): */
	struct memblock_region *min_node = &memblock.memory.regions[2];
	struct memblock_region *max_node = &memblock.memory.regions[5];

	min_addr = min_node->base;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, min_addr);

/*
 * A test that tries to allocate memory within min_addr and max_addr range
 * when the requested node and the range don't overlap, and the requested
 * node starts after max_addr. The range overlaps with multiple nodes along
 * node boundaries:
 *
 *  |     +----------+----...----+----------+         +---------+   |
 *  +-----+----------+----...----+----------+---------+---------+---+
 *
 *  |     +-----+                                                    |
 *  +-----+-----+---------------------------------------------------+
 *
 * Expect to allocate a memory region at the beginning of the first node
 * in the range after falling back to NUMA_NO_NODE.
 */
	/* in alloc_try_nid_bottom_up_numa_no_overlap_high_check(): */
	struct memblock_region *min_node = &memblock.memory.regions[2];
	struct memblock_region *max_node = &memblock.memory.regions[5];

	min_addr = min_node->base;

	ASSERT_EQ(new_rgn->size, size);
	ASSERT_EQ(new_rgn->base, min_addr);

/*
 * A test that tries to allocate a memory region in a specific NUMA node that
 * does not have enough memory to allocate a region of the requested size.
 * Additionally, none of the nodes have enough memory to allocate the region:
 *
 *  +-----------------------------------+
 *  |                new                |
 *  +-----------------------------------+
 *  |-------+-------+-------+-------+-------+-------+-------+-------|
 *  +-------+-------+-------+-------+-------+-------+-------+-------+
 *
 * Expect no allocation to happen.
 */

/*
 * A test that tries to allocate memory within min_addr and max_addr range
 * when the requested node starts at min_addr, ends at max_addr, and is the
 * same size as the region to be allocated:
 *
 *  |      +-----------+-----------------------+-----------------------|
 *  +------+-----------+-----------------------+-----------------------+
 *
 *  |             +----+-----------------------+----+                  |
 *  +-------------+----+-----------------------+----+------------------+
 *
 * Expect to merge all of the regions into one. The region counter and total
 * size fields get updated.
 */
	/* in alloc_try_nid_numa_reserved_full_merge_generic_check(): */
	struct memblock_region *req_node = &memblock.memory.regions[nid_req];
	struct memblock_region *next_node = &memblock.memory.regions[nid_next];
	struct region r1, r2;
	phys_addr_t size = req_node->size;

	r1.base = next_node->base;
	r2.base = r1.base - (size + r2.size);

	ASSERT_EQ(new_rgn->size, total_size);
	ASSERT_EQ(new_rgn->base, r2.base);

	ASSERT_LE(new_rgn->base, req_node->base);

/*
 * A test that tries to allocate memory within min_addr and max_addr range,
 * where the total range can fit the region, but it is split between two nodes
 * and everything else is reserved:
 *
 *  +-----------+
 *  |    new    |
 *  +-----------+
 *  |      +---------------------+-----------|
 *  +------+---------------------+-----------+
 *
 *  |----------------------+           +-----|
 *  +----------------------+-----------+-----+
 *
 * Expect no allocation to happen.
 */
	/* in alloc_try_nid_numa_split_all_reserved_generic_check(): */
	struct memblock_region *next_node = &memblock.memory.regions[7];
	struct region r1, r2;

	r2.base = next_node->base + SZ_128;
	r2.size = memblock_end_of_DRAM() - r2.base;

	r1.size = MEM_SIZE - (r2.size + size);