Lines Matching +full:memory +full:- +full:region
1 // SPDX-License-Identifier: GPL-2.0-or-later
51 * A simple test that tries to allocate a memory region within min_addr and
55 * | + +-----------+ |
57 * +----+-------+-----------+------+
62 * Expect to allocate a region that ends at max_addr.
82 rgn_end = rgn->base + rgn->size; in alloc_nid_top_down_simple_check()
87 ASSERT_EQ(rgn->size, size); in alloc_nid_top_down_simple_check()
88 ASSERT_EQ(rgn->base, max_addr - size); in alloc_nid_top_down_simple_check()
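
For orientation, the shape these checks share is roughly the following: a minimal sketch assuming the suite's common.h helpers (setup_memblock(), the ASSERT_*() macros) and the kernel's memblock_alloc_try_nid(); the harness may wrap the allocator call differently, and the sizes here are illustrative:

/* Sketch: top-down allocation bounded by [min_addr, max_addr]. */
static int alloc_nid_top_down_sketch(void)
{
        struct memblock_region *rgn = &memblock.reserved.regions[0];
        phys_addr_t size = SZ_128;
        phys_addr_t min_addr, max_addr;
        void *allocated_ptr;

        setup_memblock();       /* reset and register the simulated memory */

        min_addr = memblock_start_of_DRAM() + SMP_CACHE_BYTES * 2;
        max_addr = min_addr + SZ_512;

        allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);

        ASSERT_NE(allocated_ptr, NULL);
        ASSERT_EQ(rgn->size, size);
        /* Top-down: the new region sits flush against max_addr. */
        ASSERT_EQ(rgn->base, max_addr - size);

        return 0;
}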
100 * A simple test that tries to allocate a memory region within min_addr and
104 * | + +---------+ + |
106 * +------+-------+---------+--+----+
114 * Expect to allocate an aligned region that ends before max_addr.
135 rgn_end = rgn->base + rgn->size; in alloc_nid_top_down_end_misaligned_check()
140 ASSERT_EQ(rgn->size, size); in alloc_nid_top_down_end_misaligned_check()
141 ASSERT_EQ(rgn->base, max_addr - size - misalign); in alloc_nid_top_down_end_misaligned_check()
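
The asserted base is just round_down(max_addr - size, SMP_CACHE_BYTES) written out. A hypothetical helper (not part of the suite) that makes the rule explicit; it assumes size is a multiple of SMP_CACHE_BYTES and the misalignment is smaller than it:

/*
 * Hypothetical helper: the base a top-down allocation is expected to
 * get when max_addr overshoots an SMP_CACHE_BYTES boundary. Rounding
 * the candidate base down strips the extra misalign bytes, which is
 * the open-coded "max_addr - size - misalign" in the assert above.
 */
static phys_addr_t top_down_expected_base(phys_addr_t max_addr,
                                          phys_addr_t size)
{
        return round_down(max_addr - size, SMP_CACHE_BYTES);
}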
153 * A simple test that tries to allocate a memory region, which spans over the
157 * | +---------------+ |
159 * +------+---------------+-------+
164 * Expect to allocate a region that starts at min_addr and ends at
185 rgn_end = rgn->base + rgn->size; in alloc_nid_exact_address_generic_check()
190 ASSERT_EQ(rgn->size, size); in alloc_nid_exact_address_generic_check()
191 ASSERT_EQ(rgn->base, min_addr); in alloc_nid_exact_address_generic_check()
203 * A test that tries to allocate a memory region, which can't fit into
207 * | +----------+-----+ |
209 * +--------+----------+-----+----+
216 * Expect to drop the lower limit and allocate a memory region which
240 ASSERT_EQ(rgn->size, size); in alloc_nid_top_down_narrow_range_check()
241 ASSERT_EQ(rgn->base, max_addr - size); in alloc_nid_top_down_narrow_range_check()
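
A minimal sketch of the narrow-range scenario, under the same harness assumptions: the [min_addr, max_addr] window is smaller than the request, so memblock retries without the lower bound instead of failing:

/* Sketch: window too narrow, lower limit gets dropped. */
static int alloc_nid_narrow_range_sketch(void)
{
        struct memblock_region *rgn = &memblock.reserved.regions[0];
        phys_addr_t size = SZ_256;
        phys_addr_t min_addr, max_addr;
        void *allocated_ptr;

        setup_memblock();

        /* A window smaller than the requested size. */
        max_addr = memblock_end_of_DRAM();
        min_addr = max_addr - SZ_128;

        allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);

        ASSERT_NE(allocated_ptr, NULL);
        /* The region extends below min_addr rather than failing. */
        ASSERT_EQ(rgn->size, size);
        ASSERT_EQ(rgn->base, max_addr - size);

        return 0;
}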
252 * A test that tries to allocate a memory region, which can't fit into
254 * of the available memory:
256 * +-------------+
258 * +-------------+
262 * +-------+--------------+
296 * A test that tries to allocate a memory region within min_addr and max_addr range,
297 * with min_addr being so close that it's next to an allocated region:
300 * | +--------+---------------|
302 * +-------+--------+---------------+
307 * Expect a merge of both regions. Only the region size gets updated.
324 min_addr = max_addr - r2_size; in alloc_nid_min_reserved_generic_check()
325 reserved_base = min_addr - r1_size; in alloc_nid_min_reserved_generic_check()
336 ASSERT_EQ(rgn->size, total_size); in alloc_nid_min_reserved_generic_check()
337 ASSERT_EQ(rgn->base, reserved_base); in alloc_nid_min_reserved_generic_check()
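
The merge case above reconstructs to roughly this sketch (same assumptions): reserve r1 just below min_addr, allocate r2 inside [min_addr, max_addr], and the two fuse into a single reserved region:

/* Sketch: new allocation lands next to a reservation and merges. */
static int alloc_nid_merge_below_min_sketch(void)
{
        struct memblock_region *rgn = &memblock.reserved.regions[0];
        phys_addr_t r1_size = SZ_128;   /* pre-existing reservation */
        phys_addr_t r2_size = SZ_128;   /* new allocation */
        phys_addr_t total_size = r1_size + r2_size;
        phys_addr_t min_addr, max_addr, reserved_base;
        void *allocated_ptr;

        setup_memblock();

        max_addr = memblock_end_of_DRAM();
        min_addr = max_addr - r2_size;
        reserved_base = min_addr - r1_size;

        memblock_reserve(reserved_base, r1_size);

        allocated_ptr = memblock_alloc_try_nid(r2_size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);

        ASSERT_NE(allocated_ptr, NULL);
        /* One merged region: base unchanged, size covers both. */
        ASSERT_EQ(rgn->size, total_size);
        ASSERT_EQ(rgn->base, reserved_base);
        ASSERT_EQ(memblock.reserved.cnt, 1);

        return 0;
}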
348 * A test that tries to allocate a memory region within min_addr and max_addr,
349 * with max_addr being so close that it's next to an allocated region:
352 * | +-------------+--------|
354 * +----------+-------------+--------+
359 * Expect a merge of both regions. Only the region size gets updated.
374 max_addr = memblock_end_of_DRAM() - r1_size; in alloc_nid_max_reserved_generic_check()
375 min_addr = max_addr - r2_size; in alloc_nid_max_reserved_generic_check()
386 ASSERT_EQ(rgn->size, total_size); in alloc_nid_max_reserved_generic_check()
387 ASSERT_EQ(rgn->base, min_addr); in alloc_nid_max_reserved_generic_check()
398 * A test that tries to allocate memory within min_addr and max_addr range, when
400 * a new region:
403 * | +--------+ +-------+------+ |
405 * +----+--------+---+-------+------+--+
410 * Expect to merge the new region with r1. The second region does not get
419 struct region r1, r2; in alloc_nid_top_down_reserved_with_space_check()
429 r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2; in alloc_nid_top_down_reserved_with_space_check()
433 r2.base = r1.base - (r3_size + gap_size + r2.size); in alloc_nid_top_down_reserved_with_space_check()
449 ASSERT_EQ(rgn1->size, r1.size + r3_size); in alloc_nid_top_down_reserved_with_space_check()
450 ASSERT_EQ(rgn1->base, max_addr - r3_size); in alloc_nid_top_down_reserved_with_space_check()
452 ASSERT_EQ(rgn2->size, r2.size); in alloc_nid_top_down_reserved_with_space_check()
453 ASSERT_EQ(rgn2->base, r2.base); in alloc_nid_top_down_reserved_with_space_check()
464 * A test that tries to allocate memory within min_addr and max_addr range, when
466 * the size of the new region:
469 * | +--------+--------+--------+ |
471 * +-----+--------+--------+--------+-----+
476 * Expect to merge all of the regions into one. The region counter and total
483 struct region r1, r2; in alloc_nid_reserved_full_merge_generic_check()
492 r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2; in alloc_nid_reserved_full_merge_generic_check()
496 r2.base = r1.base - (r3_size + r2.size); in alloc_nid_reserved_full_merge_generic_check()
512 ASSERT_EQ(rgn->size, total_size); in alloc_nid_reserved_full_merge_generic_check()
513 ASSERT_EQ(rgn->base, r2.base); in alloc_nid_reserved_full_merge_generic_check()
524 * A test that tries to allocate memory within min_addr and max_addr range, when
526 * a new region:
529 * | +----------+------+ +------+ |
531 * +--+----------+------+----+------+---+
538 * Expect to merge the new region with r2. The second region does not get
546 struct region r1, r2; in alloc_nid_top_down_reserved_no_space_check()
556 r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2; in alloc_nid_top_down_reserved_no_space_check()
560 r2.base = r1.base - (r2.size + gap_size); in alloc_nid_top_down_reserved_no_space_check()
576 ASSERT_EQ(rgn1->size, r1.size); in alloc_nid_top_down_reserved_no_space_check()
577 ASSERT_EQ(rgn1->base, r1.base); in alloc_nid_top_down_reserved_no_space_check()
579 ASSERT_EQ(rgn2->size, r2.size + r3_size); in alloc_nid_top_down_reserved_no_space_check()
580 ASSERT_EQ(rgn2->base, r2.base - r3_size); in alloc_nid_top_down_reserved_no_space_check()
591 * A test that tries to allocate memory within min_addr and max_addr range, but
594 * +-----------+
596 * +-----------+
598 * |--------------+ +----------|
600 * +--------------+------+----------+
613 struct region r1, r2; in alloc_nid_reserved_all_generic_check()
622 r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES; in alloc_nid_reserved_all_generic_check()
625 r2.size = MEM_SIZE - (r1.size + gap_size); in alloc_nid_reserved_all_generic_check()
646 * A test that tries to allocate a memory region, where max_addr is
647 * bigger than the end address of the available memory. Expect to allocate
648 * a region that ends at the end of the available memory.
661 min_addr = memblock_end_of_DRAM() - SZ_1K; in alloc_nid_top_down_cap_max_check()
671 ASSERT_EQ(rgn->size, size); in alloc_nid_top_down_cap_max_check()
672 ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - size); in alloc_nid_top_down_cap_max_check()
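
Sketch of the capping path (same assumptions): a max_addr past the end of memory is clamped, and top-down placement then ends exactly at memblock_end_of_DRAM():

/* Sketch: out-of-range max_addr is capped to the end of memory. */
static int alloc_nid_cap_max_sketch(void)
{
        struct memblock_region *rgn = &memblock.reserved.regions[0];
        phys_addr_t size = SZ_256;
        phys_addr_t min_addr, max_addr;
        void *allocated_ptr;

        setup_memblock();

        min_addr = memblock_end_of_DRAM() - SZ_1K;
        max_addr = memblock_end_of_DRAM() + SZ_256;     /* past the end */

        allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
                                               min_addr, max_addr,
                                               NUMA_NO_NODE);

        ASSERT_NE(allocated_ptr, NULL);
        /* The range was capped: the region ends at DRAM's end. */
        ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - size);

        return 0;
}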
683 * A test that tries to allocate a memory region, where min_addr is
684 * smaller than the start address of the available memory. Expect to allocate
685 * a region that ends at the end of the available memory.
698 min_addr = memblock_start_of_DRAM() - SZ_256; in alloc_nid_top_down_cap_min_check()
708 ASSERT_EQ(rgn->size, size); in alloc_nid_top_down_cap_min_check()
709 ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - size); in alloc_nid_top_down_cap_min_check()
720 * A simple test that tries to allocate a memory region within min_addr and
724 * | +-----------+ | |
726 * +----+-----------+-----------+------+
731 * Expect to allocate a region that ends before max_addr.
751 rgn_end = rgn->base + rgn->size; in alloc_nid_bottom_up_simple_check()
756 ASSERT_EQ(rgn->size, size); in alloc_nid_bottom_up_simple_check()
757 ASSERT_EQ(rgn->base, min_addr); in alloc_nid_bottom_up_simple_check()
769 * A simple test that tries to allocate a memory region within min_addr and
773 * | + +-----------+ + |
775 * +-----+---+-----------+-----+-----+
776 * ^ ^----. ^
783 * Expect to allocate an aligned region that ends before max_addr.
804 rgn_end = rgn->base + rgn->size; in alloc_nid_bottom_up_start_misaligned_check()
809 ASSERT_EQ(rgn->size, size); in alloc_nid_bottom_up_start_misaligned_check()
810 ASSERT_EQ(rgn->base, min_addr + (SMP_CACHE_BYTES - misalign)); in alloc_nid_bottom_up_start_misaligned_check()
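
This is the bottom-up mirror of the alignment rule: a misaligned min_addr is rounded up, so the region starts at the next SMP_CACHE_BYTES boundary. A hypothetical helper (not part of the suite), assuming 0 < misalign < SMP_CACHE_BYTES:

/*
 * Hypothetical helper: where a bottom-up allocation is expected to
 * start when min_addr overshoots an alignment boundary. Equivalent to
 * the open-coded "min_addr + (SMP_CACHE_BYTES - misalign)" above.
 */
static phys_addr_t bottom_up_expected_base(phys_addr_t min_addr)
{
        return round_up(min_addr, SMP_CACHE_BYTES);
}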
822 * A test that tries to allocate a memory region, which can't fit into min_addr
826 * |---------+ + + |
828 * +---------+---------+----+------+
835 * Expect to drop the lower limit and allocate a memory region which
836 * starts at the beginning of the available memory.
859 ASSERT_EQ(rgn->size, size); in alloc_nid_bottom_up_narrow_range_check()
860 ASSERT_EQ(rgn->base, memblock_start_of_DRAM()); in alloc_nid_bottom_up_narrow_range_check()
871 * A test that tries to allocate memory within min_addr and max_addr range, when
873 * a new region:
876 * | +--------+-------+ +------+ |
878 * +----+--------+-------+---+------+--+
883 * Expect to merge the new region with r2. The second region does not get
892 struct region r1, r2; in alloc_nid_bottom_up_reserved_with_space_check()
902 r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2; in alloc_nid_bottom_up_reserved_with_space_check()
906 r2.base = r1.base - (r3_size + gap_size + r2.size); in alloc_nid_bottom_up_reserved_with_space_check()
922 ASSERT_EQ(rgn1->size, r1.size); in alloc_nid_bottom_up_reserved_with_space_check()
923 ASSERT_EQ(rgn1->base, max_addr); in alloc_nid_bottom_up_reserved_with_space_check()
925 ASSERT_EQ(rgn2->size, r2.size + r3_size); in alloc_nid_bottom_up_reserved_with_space_check()
926 ASSERT_EQ(rgn2->base, r2.base); in alloc_nid_bottom_up_reserved_with_space_check()
937 * A test that tries to allocate memory within min_addr and max_addr range, when
939 * the size of the new region:
942 * |----------+ +------+ +----+ |
944 * +----------+----+------+---+----+--+
951 * Expect to drop the lower limit and allocate memory at the beginning of the
952 * available memory. The region counter and total size fields get updated.
962 struct region r1, r2; in alloc_nid_bottom_up_reserved_no_space_check()
972 r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2; in alloc_nid_bottom_up_reserved_no_space_check()
976 r2.base = r1.base - (r2.size + gap_size); in alloc_nid_bottom_up_reserved_no_space_check()
992 ASSERT_EQ(rgn3->size, r3_size); in alloc_nid_bottom_up_reserved_no_space_check()
993 ASSERT_EQ(rgn3->base, memblock_start_of_DRAM()); in alloc_nid_bottom_up_reserved_no_space_check()
995 ASSERT_EQ(rgn2->size, r2.size); in alloc_nid_bottom_up_reserved_no_space_check()
996 ASSERT_EQ(rgn2->base, r2.base); in alloc_nid_bottom_up_reserved_no_space_check()
998 ASSERT_EQ(rgn1->size, r1.size); in alloc_nid_bottom_up_reserved_no_space_check()
999 ASSERT_EQ(rgn1->base, r1.base); in alloc_nid_bottom_up_reserved_no_space_check()
1010 * A test that tries to allocate a memory region, where max_addr is
1011 * bigger than the end address of the available memory. Expect to allocate
1012 * a region that starts at min_addr.
1035 ASSERT_EQ(rgn->size, size); in alloc_nid_bottom_up_cap_max_check()
1036 ASSERT_EQ(rgn->base, min_addr); in alloc_nid_bottom_up_cap_max_check()
1047 * A test that tries to allocate a memory region, where min_addr is
1048 * smaller than the start address of the available memory. Expect to allocate
1049 * a region at the beginning of the available memory.
1063 max_addr = memblock_end_of_DRAM() - SZ_256; in alloc_nid_bottom_up_cap_min_check()
1072 ASSERT_EQ(rgn->size, size); in alloc_nid_bottom_up_cap_min_check()
1073 ASSERT_EQ(rgn->base, memblock_start_of_DRAM()); in alloc_nid_bottom_up_cap_min_check()
1239 * A test that tries to allocate a memory region in a specific NUMA node that
1240 * has enough memory to allocate a region of the requested size.
1241 * Expect to allocate an aligned region at the end of the requested node.
1247 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_top_down_numa_simple_check()
1256 ASSERT_LE(SZ_4, req_node->size); in alloc_nid_top_down_numa_simple_check()
1257 size = req_node->size / SZ_4; in alloc_nid_top_down_numa_simple_check()
1267 ASSERT_EQ(new_rgn->size, size); in alloc_nid_top_down_numa_simple_check()
1268 ASSERT_EQ(new_rgn->base, region_end(req_node) - size); in alloc_nid_top_down_numa_simple_check()
1269 ASSERT_LE(req_node->base, new_rgn->base); in alloc_nid_top_down_numa_simple_check()
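
The NUMA variants follow the same pattern but pin the allocation to a node. A minimal sketch, additionally assuming the suite's setup_numa_memblock() fixture and its node_fractions layout table (exact fixture names may differ):

/* Sketch: node-pinned request; top-down lands at the node's end. */
static int alloc_nid_numa_sketch(void)
{
        int nid_req = 3;
        struct memblock_region *new_rgn = &memblock.reserved.regions[0];
        struct memblock_region *req_node = &memblock.memory.regions[nid_req];
        phys_addr_t size;
        void *allocated_ptr;

        setup_numa_memblock(node_fractions);    /* assumed NUMA fixture */

        size = req_node->size / SZ_4;   /* comfortably fits in the node */

        allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
                                               memblock_start_of_DRAM(),
                                               memblock_end_of_DRAM(),
                                               nid_req);

        ASSERT_NE(allocated_ptr, NULL);
        /* Placed inside the requested node, flush against its end. */
        ASSERT_EQ(new_rgn->base, region_end(req_node) - size);
        ASSERT_LE(req_node->base, new_rgn->base);

        return 0;
}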
1280 * A test that tries to allocate a memory region in a specific NUMA node that
1281 * does not have enough memory to allocate a region of the requested size:
1283 * | +-----+ +------------------+ |
1285 * +---+-----+----------+------------------+-----+
1287 * | +---------+ |
1289 * +-----------------------------+---------+-----+
1291 * Expect to allocate an aligned region at the end of the last node that has
1292 * enough memory (in this case, nid = 6) after falling back to NUMA_NO_NODE.
1299 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_top_down_numa_small_node_check()
1300 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_nid_top_down_numa_small_node_check()
1309 size = SZ_2 * req_node->size; in alloc_nid_top_down_numa_small_node_check()
1319 ASSERT_EQ(new_rgn->size, size); in alloc_nid_top_down_numa_small_node_check()
1320 ASSERT_EQ(new_rgn->base, region_end(exp_node) - size); in alloc_nid_top_down_numa_small_node_check()
1321 ASSERT_LE(exp_node->base, new_rgn->base); in alloc_nid_top_down_numa_small_node_check()
1332 * A test that tries to allocate a memory region in a specific NUMA node that
1335 * | +---------+ +------------------+ |
1337 * +--------------+---------+------------+------------------+-----+
1339 * | +---------+ +---------+ |
1341 * +--------------+---------+---------------------+---------+-----+
1343 * Expect to allocate an aligned region at the end of the last node that is
1344 * large enough and has enough unreserved memory (in this case, nid = 6) after
1345 * falling back to NUMA_NO_NODE. The region count and total size get updated.
1352 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_top_down_numa_node_reserved_check()
1353 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_nid_top_down_numa_node_reserved_check()
1362 size = req_node->size; in alloc_nid_top_down_numa_node_reserved_check()
1366 memblock_reserve(req_node->base, req_node->size); in alloc_nid_top_down_numa_node_reserved_check()
1373 ASSERT_EQ(new_rgn->size, size); in alloc_nid_top_down_numa_node_reserved_check()
1374 ASSERT_EQ(new_rgn->base, region_end(exp_node) - size); in alloc_nid_top_down_numa_node_reserved_check()
1375 ASSERT_LE(exp_node->base, new_rgn->base); in alloc_nid_top_down_numa_node_reserved_check()
1378 ASSERT_EQ(memblock.reserved.total_size, size + req_node->size); in alloc_nid_top_down_numa_node_reserved_check()
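
Sketch of the fallback accounting above (same NUMA fixture assumptions): the requested node is consumed up front, the allocator falls back to NUMA_NO_NODE, and both reservations show up in the totals:

/* Sketch: requested node fully reserved, allocation falls back. */
static int alloc_nid_numa_reserved_node_sketch(void)
{
        int nid_req = 2;
        struct memblock_region *req_node = &memblock.memory.regions[nid_req];
        phys_addr_t size = req_node->size;
        void *allocated_ptr;

        setup_numa_memblock(node_fractions);    /* assumed NUMA fixture */

        /* Consume the requested node entirely before allocating. */
        memblock_reserve(req_node->base, req_node->size);

        allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
                                               memblock_start_of_DRAM(),
                                               memblock_end_of_DRAM(),
                                               nid_req);

        ASSERT_NE(allocated_ptr, NULL);
        /* Two reserved regions: the blocker and the fallback one. */
        ASSERT_EQ(memblock.reserved.cnt, 2);
        ASSERT_EQ(memblock.reserved.total_size, size + req_node->size);

        return 0;
}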
1386 * A test that tries to allocate a memory region in a specific NUMA node that
1387 * is partially reserved but has enough memory for the allocated region:
1389 * | +---------------------------------------+ |
1391 * +-----------+---------------------------------------+----------+
1393 * | +------------------+ +-----+ |
1395 * +-----------+------------------+--------------+-----+----------+
1397 * Expect to allocate an aligned region at the end of the requested node. The
1398 * region count and total size get updated.
1404 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_top_down_numa_part_reserved_check()
1406 struct region r1; in alloc_nid_top_down_numa_part_reserved_check()
1414 ASSERT_LE(SZ_8, req_node->size); in alloc_nid_top_down_numa_part_reserved_check()
1415 r1.base = req_node->base; in alloc_nid_top_down_numa_part_reserved_check()
1416 r1.size = req_node->size / SZ_2; in alloc_nid_top_down_numa_part_reserved_check()
1428 ASSERT_EQ(new_rgn->size, size); in alloc_nid_top_down_numa_part_reserved_check()
1429 ASSERT_EQ(new_rgn->base, region_end(req_node) - size); in alloc_nid_top_down_numa_part_reserved_check()
1430 ASSERT_LE(req_node->base, new_rgn->base); in alloc_nid_top_down_numa_part_reserved_check()
1441 * A test that tries to allocate a memory region in a specific NUMA node that
1442 * is partially reserved and does not have enough contiguous memory for the
1443 * allocated region:
1445 * | +-----------------------+ +----------------------|
1447 * +-----------+-----------------------+---------+----------------------+
1449 * | +----------+ +-----------|
1451 * +-----------------+----------+---------------------------+-----------+
1453 * Expect to allocate an aligned region at the end of the last node that is
1454 * large enough and has enough unreserved memory (in this case,
1455 * nid = NUMA_NODES - 1) after falling back to NUMA_NO_NODE. The region count
1461 int nid_exp = NUMA_NODES - 1; in alloc_nid_top_down_numa_part_reserved_fallback_check()
1463 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_top_down_numa_part_reserved_fallback_check()
1464 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_nid_top_down_numa_part_reserved_fallback_check()
1466 struct region r1; in alloc_nid_top_down_numa_part_reserved_fallback_check()
1474 ASSERT_LE(SZ_4, req_node->size); in alloc_nid_top_down_numa_part_reserved_fallback_check()
1475 size = req_node->size / SZ_2; in alloc_nid_top_down_numa_part_reserved_fallback_check()
1476 r1.base = req_node->base + (size / SZ_2); in alloc_nid_top_down_numa_part_reserved_fallback_check()
1489 ASSERT_EQ(new_rgn->size, size); in alloc_nid_top_down_numa_part_reserved_fallback_check()
1490 ASSERT_EQ(new_rgn->base, region_end(exp_node) - size); in alloc_nid_top_down_numa_part_reserved_fallback_check()
1491 ASSERT_LE(exp_node->base, new_rgn->base); in alloc_nid_top_down_numa_part_reserved_fallback_check()
1502 * A test that tries to allocate a memory region that spans over the min_addr
1510 * | +-----------------------+-----------+ |
1512 * +-----------+-----------------------+-----------+--------------+
1514 * | +-----------+ |
1516 * +-----------------------+-----------+--------------------------+
1518 * Expect to drop the lower limit and allocate a memory region that ends at
1525 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_top_down_numa_split_range_low_check()
1536 min_addr = req_node_end - SZ_256; in alloc_nid_top_down_numa_split_range_low_check()
1545 ASSERT_EQ(new_rgn->size, size); in alloc_nid_top_down_numa_split_range_low_check()
1546 ASSERT_EQ(new_rgn->base, req_node_end - size); in alloc_nid_top_down_numa_split_range_low_check()
1547 ASSERT_LE(req_node->base, new_rgn->base); in alloc_nid_top_down_numa_split_range_low_check()
1558 * A test that tries to allocate a memory region that spans over the min_addr
1566 * | +--------------------------+---------+ |
1568 * +------+--------------------------+---------+----------------+
1570 * | +---------+ |
1572 * +-----------------------+---------+--------------------------+
1574 * Expect to drop the lower limit and allocate a memory region that
1580 int nid_exp = nid_req - 1; in alloc_nid_top_down_numa_split_range_high_check()
1582 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_nid_top_down_numa_split_range_high_check()
1593 min_addr = exp_node_end - SZ_256; in alloc_nid_top_down_numa_split_range_high_check()
1602 ASSERT_EQ(new_rgn->size, size); in alloc_nid_top_down_numa_split_range_high_check()
1603 ASSERT_EQ(new_rgn->base, exp_node_end - size); in alloc_nid_top_down_numa_split_range_high_check()
1604 ASSERT_LE(exp_node->base, new_rgn->base); in alloc_nid_top_down_numa_split_range_high_check()
1615 * A test that tries to allocate a memory region that spans over the min_addr
1623 * | +---------------+ +-------------+---------+ |
1625 * +----+---------------+--------+-------------+---------+----------+
1627 * | +---------+ |
1629 * +----------+---------+-------------------------------------------+
1631 * Expect to drop the lower limit and allocate a memory region that ends at
1638 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_top_down_numa_no_overlap_split_check()
1639 struct memblock_region *node2 = &memblock.memory.regions[6]; in alloc_nid_top_down_numa_no_overlap_split_check()
1649 min_addr = node2->base - SZ_256; in alloc_nid_top_down_numa_no_overlap_split_check()
1658 ASSERT_EQ(new_rgn->size, size); in alloc_nid_top_down_numa_no_overlap_split_check()
1659 ASSERT_EQ(new_rgn->base, region_end(req_node) - size); in alloc_nid_top_down_numa_no_overlap_split_check()
1660 ASSERT_LE(req_node->base, new_rgn->base); in alloc_nid_top_down_numa_no_overlap_split_check()
1671 * A test that tries to allocate memory within min_addr and max_addr range when
1680 * |-----------+ +----------+----...----+----------+ |
1682 * +-----------+-----------+----------+----...----+----------+------+
1684 * | +-----+ |
1686 * +---------------------------------------------------+-----+------+
1688 * Expect to allocate a memory region at the end of the final node in
1695 struct memblock_region *min_node = &memblock.memory.regions[2]; in alloc_nid_top_down_numa_no_overlap_low_check()
1696 struct memblock_region *max_node = &memblock.memory.regions[5]; in alloc_nid_top_down_numa_no_overlap_low_check()
1705 min_addr = min_node->base; in alloc_nid_top_down_numa_no_overlap_low_check()
1714 ASSERT_EQ(new_rgn->size, size); in alloc_nid_top_down_numa_no_overlap_low_check()
1715 ASSERT_EQ(new_rgn->base, max_addr - size); in alloc_nid_top_down_numa_no_overlap_low_check()
1716 ASSERT_LE(max_node->base, new_rgn->base); in alloc_nid_top_down_numa_no_overlap_low_check()
1727 * A test that tries to allocate memory within min_addr and max_addr range when
1736 * | +----------+----...----+----------+ +-----------+ |
1738 * +-----+----------+----...----+----------+--------+-----------+---+
1740 * | +-----+ |
1742 * +---------------------------------+-----+------------------------+
1744 * Expect to allocate a memory region at the end of the final node in
1751 struct memblock_region *min_node = &memblock.memory.regions[2]; in alloc_nid_top_down_numa_no_overlap_high_check()
1752 struct memblock_region *max_node = &memblock.memory.regions[5]; in alloc_nid_top_down_numa_no_overlap_high_check()
1761 min_addr = min_node->base; in alloc_nid_top_down_numa_no_overlap_high_check()
1770 ASSERT_EQ(new_rgn->size, size); in alloc_nid_top_down_numa_no_overlap_high_check()
1771 ASSERT_EQ(new_rgn->base, max_addr - size); in alloc_nid_top_down_numa_no_overlap_high_check()
1772 ASSERT_LE(max_node->base, new_rgn->base); in alloc_nid_top_down_numa_no_overlap_high_check()
1783 * A test that tries to allocate a memory region in a specific NUMA node that
1784 * has enough memory to allocate a region of the requested size.
1785 * Expect to allocate an aligned region at the beginning of the requested node.
1791 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_bottom_up_numa_simple_check()
1800 ASSERT_LE(SZ_4, req_node->size); in alloc_nid_bottom_up_numa_simple_check()
1801 size = req_node->size / SZ_4; in alloc_nid_bottom_up_numa_simple_check()
1811 ASSERT_EQ(new_rgn->size, size); in alloc_nid_bottom_up_numa_simple_check()
1812 ASSERT_EQ(new_rgn->base, req_node->base); in alloc_nid_bottom_up_numa_simple_check()
1824 * A test that tries to allocate a memory region in a specific NUMA node that
1825 * does not have enough memory to allocate a region of the requested size:
1827 * |----------------------+-----+ |
1829 * +----------------------+-----+----------------+
1831 * |---------+ |
1833 * +---------+-----------------------------------+
1835 * Expect to allocate an aligned region at the beginning of the first node that
1836 * has enough memory (in this case, nid = 0) after falling back to NUMA_NO_NODE.
1843 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_bottom_up_numa_small_node_check()
1844 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_nid_bottom_up_numa_small_node_check()
1853 size = SZ_2 * req_node->size; in alloc_nid_bottom_up_numa_small_node_check()
1863 ASSERT_EQ(new_rgn->size, size); in alloc_nid_bottom_up_numa_small_node_check()
1864 ASSERT_EQ(new_rgn->base, exp_node->base); in alloc_nid_bottom_up_numa_small_node_check()
1876 * A test that tries to allocate a memory region in a specific NUMA node that
1879 * |----------------------+ +-----------+ |
1881 * +----------------------+-----+-----------+--------------------+
1883 * |-----------+ +-----------+ |
1885 * +-----------+----------------+-----------+--------------------+
1887 * Expect to allocate an aligned region at the beginning of the first node that
1888 * is large enough and has enough unreserved memory (in this case, nid = 0)
1889 * after falling back to NUMA_NO_NODE. The region count and total size get
1897 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_bottom_up_numa_node_reserved_check()
1898 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_nid_bottom_up_numa_node_reserved_check()
1907 size = req_node->size; in alloc_nid_bottom_up_numa_node_reserved_check()
1911 memblock_reserve(req_node->base, req_node->size); in alloc_nid_bottom_up_numa_node_reserved_check()
1918 ASSERT_EQ(new_rgn->size, size); in alloc_nid_bottom_up_numa_node_reserved_check()
1919 ASSERT_EQ(new_rgn->base, exp_node->base); in alloc_nid_bottom_up_numa_node_reserved_check()
1923 ASSERT_EQ(memblock.reserved.total_size, size + req_node->size); in alloc_nid_bottom_up_numa_node_reserved_check()
1931 * A test that tries to allocate a memory region in a specific NUMA node that
1932 * is partially reserved but has enough memory for the allocated region:
1934 * | +---------------------------------------+ |
1936 * +-----------+---------------------------------------+---------+
1938 * | +------------------+-----+ |
1940 * +-----------+------------------+-----+------------------------+
1942 * Expect to allocate an aligned region in the requested node that merges with
1943 * the existing reserved region. The total size gets updated.
1949 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_bottom_up_numa_part_reserved_check()
1951 struct region r1; in alloc_nid_bottom_up_numa_part_reserved_check()
1960 ASSERT_LE(SZ_8, req_node->size); in alloc_nid_bottom_up_numa_part_reserved_check()
1961 r1.base = req_node->base; in alloc_nid_bottom_up_numa_part_reserved_check()
1962 r1.size = req_node->size / SZ_2; in alloc_nid_bottom_up_numa_part_reserved_check()
1975 ASSERT_EQ(new_rgn->size, total_size); in alloc_nid_bottom_up_numa_part_reserved_check()
1976 ASSERT_EQ(new_rgn->base, req_node->base); in alloc_nid_bottom_up_numa_part_reserved_check()
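
Sketch of that merge (same NUMA fixture assumptions): the reservation covers the front of the node, so a bottom-up, node-pinned allocation lands right behind it and the two fuse:

/* Sketch: bottom-up allocation merges with an in-node reservation. */
static int alloc_nid_numa_merge_sketch(void)
{
        int nid_req = 4;
        struct memblock_region *new_rgn = &memblock.reserved.regions[0];
        struct memblock_region *req_node = &memblock.memory.regions[nid_req];
        phys_addr_t size, total_size;
        struct region r1;
        void *allocated_ptr;

        setup_numa_memblock(node_fractions);    /* assumed NUMA fixture */

        /* Reserve the first half of the requested node. */
        r1.base = req_node->base;
        r1.size = req_node->size / SZ_2;
        size = r1.size / SZ_4;
        total_size = r1.size + size;

        memblock_reserve(r1.base, r1.size);

        allocated_ptr = memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
                                               memblock_start_of_DRAM(),
                                               memblock_end_of_DRAM(),
                                               nid_req);

        ASSERT_NE(allocated_ptr, NULL);
        /* One region: reservation plus new allocation, same base. */
        ASSERT_EQ(new_rgn->size, total_size);
        ASSERT_EQ(new_rgn->base, req_node->base);

        return 0;
}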
1988 * A test that tries to allocate a memory region in a specific NUMA node that
1989 * is partially reserved and does not have enough contiguous memory for the
1990 * allocated region:
1992 * |----------------------+ +-----------------------+ |
1994 * +----------------------+-------+-----------------------+---------+
1996 * |-----------+ +----------+ |
1998 * +-----------+------------------------+----------+----------------+
2000 * Expect to allocate an aligned region at the beginning of the first
2001 * node that is large enough and has enough unreserved memory (in this case,
2002 * nid = 0) after falling back to NUMA_NO_NODE. The region count and total size
2010 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_bottom_up_numa_part_reserved_fallback_check()
2011 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_nid_bottom_up_numa_part_reserved_fallback_check()
2013 struct region r1; in alloc_nid_bottom_up_numa_part_reserved_fallback_check()
2021 ASSERT_LE(SZ_4, req_node->size); in alloc_nid_bottom_up_numa_part_reserved_fallback_check()
2022 size = req_node->size / SZ_2; in alloc_nid_bottom_up_numa_part_reserved_fallback_check()
2023 r1.base = req_node->base + (size / SZ_2); in alloc_nid_bottom_up_numa_part_reserved_fallback_check()
2036 ASSERT_EQ(new_rgn->size, size); in alloc_nid_bottom_up_numa_part_reserved_fallback_check()
2037 ASSERT_EQ(new_rgn->base, exp_node->base); in alloc_nid_bottom_up_numa_part_reserved_fallback_check()
2049 * A test that tries to allocate a memory region that spans over the min_addr
2057 * | +-----------------------+-----------+ |
2059 * +-----------+-----------------------+-----------+--------------+
2061 * | +-----------+ |
2063 * +-----------+-----------+--------------------------------------+
2065 * Expect to drop the lower limit and allocate a memory region at the beginning
2072 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_bottom_up_numa_split_range_low_check()
2083 min_addr = req_node_end - SZ_256; in alloc_nid_bottom_up_numa_split_range_low_check()
2092 ASSERT_EQ(new_rgn->size, size); in alloc_nid_bottom_up_numa_split_range_low_check()
2093 ASSERT_EQ(new_rgn->base, req_node->base); in alloc_nid_bottom_up_numa_split_range_low_check()
2105 * A test that tries to allocate a memory region that spans over the min_addr
2113 * |------------------+ +----------------------+---------+ |
2115 * +------------------+--------+----------------------+---------+------+
2117 * |---------+ |
2119 * +---------+---------------------------------------------------------+
2121 * Expect to drop the lower limit and allocate a memory region at the beginning
2122 * of the first node that has enough memory.
2129 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_bottom_up_numa_split_range_high_check()
2130 struct memblock_region *exp_node = &memblock.memory.regions[nid_exp]; in alloc_nid_bottom_up_numa_split_range_high_check()
2141 min_addr = req_node->base - SZ_256; in alloc_nid_bottom_up_numa_split_range_high_check()
2150 ASSERT_EQ(new_rgn->size, size); in alloc_nid_bottom_up_numa_split_range_high_check()
2151 ASSERT_EQ(new_rgn->base, exp_node->base); in alloc_nid_bottom_up_numa_split_range_high_check()
2163 * A test that tries to allocate a memory region that spans over the min_addr
2171 * | +---------------+ +-------------+---------+ |
2173 * +----+---------------+--------+-------------+---------+---------+
2175 * | +---------+ |
2177 * +----+---------+------------------------------------------------+
2179 * Expect to drop the lower limit and allocate a memory region that starts at
2186 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_bottom_up_numa_no_overlap_split_check()
2187 struct memblock_region *node2 = &memblock.memory.regions[6]; in alloc_nid_bottom_up_numa_no_overlap_split_check()
2197 min_addr = node2->base - SZ_256; in alloc_nid_bottom_up_numa_no_overlap_split_check()
2206 ASSERT_EQ(new_rgn->size, size); in alloc_nid_bottom_up_numa_no_overlap_split_check()
2207 ASSERT_EQ(new_rgn->base, req_node->base); in alloc_nid_bottom_up_numa_no_overlap_split_check()
2219 * A test that tries to allocate memory within min_addr and max_addr range when
2228 * |-----------+ +----------+----...----+----------+ |
2230 * +-----------+-----------+----------+----...----+----------+------+
2232 * | +-----+ |
2234 * +-----------------------+-----+----------------------------------+
2236 * Expect to allocate a memory region at the beginning of the first node
2243 struct memblock_region *min_node = &memblock.memory.regions[2]; in alloc_nid_bottom_up_numa_no_overlap_low_check()
2244 struct memblock_region *max_node = &memblock.memory.regions[5]; in alloc_nid_bottom_up_numa_no_overlap_low_check()
2253 min_addr = min_node->base; in alloc_nid_bottom_up_numa_no_overlap_low_check()
2262 ASSERT_EQ(new_rgn->size, size); in alloc_nid_bottom_up_numa_no_overlap_low_check()
2263 ASSERT_EQ(new_rgn->base, min_addr); in alloc_nid_bottom_up_numa_no_overlap_low_check()
2275 * A test that tries to allocate memory within min_addr and max_addr range when
2284 * | +----------+----...----+----------+ +---------+ |
2286 * +-----+----------+----...----+----------+---------+---------+---+
2288 * | +-----+ |
2290 * +-----+-----+---------------------------------------------------+
2292 * Expect to allocate a memory region at the beginning of the first node
2299 struct memblock_region *min_node = &memblock.memory.regions[2]; in alloc_nid_bottom_up_numa_no_overlap_high_check()
2300 struct memblock_region *max_node = &memblock.memory.regions[5]; in alloc_nid_bottom_up_numa_no_overlap_high_check()
2309 min_addr = min_node->base; in alloc_nid_bottom_up_numa_no_overlap_high_check()
2318 ASSERT_EQ(new_rgn->size, size); in alloc_nid_bottom_up_numa_no_overlap_high_check()
2319 ASSERT_EQ(new_rgn->base, min_addr); in alloc_nid_bottom_up_numa_no_overlap_high_check()
2331 * A test that tries to allocate a memory region in a specific NUMA node that
2332 * does not have enough memory to allocate a region of the requested size.
2333 * Additionally, none of the nodes have enough memory to allocate the region:
2335 * +-----------------------------------+
2337 * +-----------------------------------+
2338 * |-------+-------+-------+-------+-------+-------+-------+-------|
2340 * +-------+-------+-------+-------+-------+-------+-------+-------+
2368 * A test that tries to allocate memory within min_addr and max_addr range when
2370 * min_addr and ends at max_addr and is the same size as the region to be
2377 * | +-----------+-----------------------+-----------------------|
2379 * +------+-----------+-----------------------+-----------------------+
2381 * | +----+-----------------------+----+ |
2383 * +-------------+----+-----------------------+----+------------------+
2385 * Expect to merge all of the regions into one. The region counter and total
2393 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_nid_numa_reserved_full_merge_generic_check()
2394 struct memblock_region *next_node = &memblock.memory.regions[nid_next]; in alloc_nid_numa_reserved_full_merge_generic_check()
2396 struct region r1, r2; in alloc_nid_numa_reserved_full_merge_generic_check()
2397 phys_addr_t size = req_node->size; in alloc_nid_numa_reserved_full_merge_generic_check()
2405 r1.base = next_node->base; in alloc_nid_numa_reserved_full_merge_generic_check()
2409 r2.base = r1.base - (size + r2.size); in alloc_nid_numa_reserved_full_merge_generic_check()
2424 ASSERT_EQ(new_rgn->size, total_size); in alloc_nid_numa_reserved_full_merge_generic_check()
2425 ASSERT_EQ(new_rgn->base, r2.base); in alloc_nid_numa_reserved_full_merge_generic_check()
2427 ASSERT_LE(new_rgn->base, req_node->base); in alloc_nid_numa_reserved_full_merge_generic_check()
2439 * A test that tries to allocate memory within min_addr and max_addr range,
2440 * where the total range can fit the region, but it is split between two nodes
2444 * +-----------+
2446 * +-----------+
2447 * | +---------------------+-----------|
2449 * +------+---------------------+-----------+
2451 * |----------------------+ +-----|
2453 * +----------------------+-----------+-----+
2465 struct memblock_region *next_node = &memblock.memory.regions[7]; in alloc_nid_numa_split_all_reserved_generic_check()
2466 struct region r1, r2; in alloc_nid_numa_split_all_reserved_generic_check()
2474 r2.base = next_node->base + SZ_128; in alloc_nid_numa_split_all_reserved_generic_check()
2475 r2.size = memblock_end_of_DRAM() - r2.base; in alloc_nid_numa_split_all_reserved_generic_check()
2477 r1.size = MEM_SIZE - (r2.size + size); in alloc_nid_numa_split_all_reserved_generic_check()
2498 * A simple test that tries to allocate a memory region through the
2500 * correct NUMA node set for the new region.
2507 struct memblock_region *req_node = &memblock.memory.regions[nid_req]; in alloc_node_on_correct_nid()
2518 ASSERT_EQ(nid_req, req_node->nid); in alloc_node_on_correct_nid()
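
A sketch of that final bookkeeping check (same fixture assumptions): after a node-pinned allocation succeeds, the memory region for the requested node should still report the requested node id; memblock_get_region_node() is the kernel accessor, while the suite also reads the field directly as above:

/* Sketch: the requested node's region carries the expected nid. */
static int alloc_node_nid_sketch(void)
{
        int nid_req = 2;
        struct memblock_region *req_node = &memblock.memory.regions[nid_req];
        void *allocated_ptr;

        setup_numa_memblock(node_fractions);    /* assumed NUMA fixture */

        allocated_ptr = memblock_alloc_try_nid(req_node->size / SZ_4,
                                               SMP_CACHE_BYTES,
                                               memblock_start_of_DRAM(),
                                               memblock_end_of_DRAM(),
                                               nid_req);

        ASSERT_NE(allocated_ptr, NULL);
        /* The region backing the node is tagged with nid_req. */
        ASSERT_EQ(nid_req, memblock_get_region_node(req_node));

        return 0;
}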