xref: /linux/tools/testing/memblock/tests/alloc_nid_api.c (revision 42c3ba86581896be8dd7fb88ed075b600fd57fa1)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 #include "alloc_nid_api.h"
3 
/* TEST_F_* flags applied to every run_memblock_alloc_try_nid() call below */
static int alloc_nid_test_flags = TEST_F_NONE;
5 
6 static inline const char * const get_memblock_alloc_try_nid_name(int flags)
7 {
8 	if (flags & TEST_F_RAW)
9 		return "memblock_alloc_try_nid_raw";
10 	return "memblock_alloc_try_nid";
11 }
12 
13 static inline void *run_memblock_alloc_try_nid(phys_addr_t size,
14 					       phys_addr_t align,
15 					       phys_addr_t min_addr,
16 					       phys_addr_t max_addr, int nid)
17 {
18 	if (alloc_nid_test_flags & TEST_F_RAW)
19 		return memblock_alloc_try_nid_raw(size, align, min_addr,
20 						  max_addr, nid);
21 	return memblock_alloc_try_nid(size, align, min_addr, max_addr, nid);
22 }
23 
24 /*
25  * A simple test that tries to allocate a memory region within min_addr and
26  * max_addr range:
27  *
28  *        +                   +
29  *   |    +       +-----------+      |
30  *   |    |       |    rgn    |      |
31  *   +----+-------+-----------+------+
32  *        ^                   ^
33  *        |                   |
34  *        min_addr           max_addr
35  *
36  * Expect to allocate a region that ends at max_addr.
37  */
38 static int alloc_try_nid_top_down_simple_check(void)
39 {
40 	struct memblock_region *rgn = &memblock.reserved.regions[0];
41 	void *allocated_ptr = NULL;
42 	phys_addr_t size = SZ_128;
43 	phys_addr_t min_addr;
44 	phys_addr_t max_addr;
45 	phys_addr_t rgn_end;
46 
47 	PREFIX_PUSH();
48 	setup_memblock();
49 
50 	min_addr = memblock_start_of_DRAM() + SMP_CACHE_BYTES * 2;
51 	max_addr = min_addr + SZ_512;
52 
53 	allocated_ptr = run_memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
54 						   min_addr, max_addr,
55 						   NUMA_NO_NODE);
56 	rgn_end = rgn->base + rgn->size;
57 
58 	ASSERT_NE(allocated_ptr, NULL);
59 	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);
60 
61 	ASSERT_EQ(rgn->size, size);
62 	ASSERT_EQ(rgn->base, max_addr - size);
63 	ASSERT_EQ(rgn_end, max_addr);
64 
65 	ASSERT_EQ(memblock.reserved.cnt, 1);
66 	ASSERT_EQ(memblock.reserved.total_size, size);
67 
68 	test_pass_pop();
69 
70 	return 0;
71 }
72 
73 /*
74  * A simple test that tries to allocate a memory region within min_addr and
75  * max_addr range, where the end address is misaligned:
76  *
77  *         +       +            +
78  *  |      +       +---------+  +    |
79  *  |      |       |   rgn   |  |    |
80  *  +------+-------+---------+--+----+
81  *         ^       ^            ^
82  *         |       |            |
 *       min_addr  |            max_addr
84  *                 |
85  *                 Aligned address
86  *                 boundary
87  *
88  * Expect to allocate an aligned region that ends before max_addr.
89  */
90 static int alloc_try_nid_top_down_end_misaligned_check(void)
91 {
92 	struct memblock_region *rgn = &memblock.reserved.regions[0];
93 	void *allocated_ptr = NULL;
94 	phys_addr_t size = SZ_128;
95 	phys_addr_t misalign = SZ_2;
96 	phys_addr_t min_addr;
97 	phys_addr_t max_addr;
98 	phys_addr_t rgn_end;
99 
100 	PREFIX_PUSH();
101 	setup_memblock();
102 
103 	min_addr = memblock_start_of_DRAM() + SMP_CACHE_BYTES * 2;
104 	max_addr = min_addr + SZ_512 + misalign;
105 
106 	allocated_ptr = run_memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
107 						   min_addr, max_addr,
108 						   NUMA_NO_NODE);
109 	rgn_end = rgn->base + rgn->size;
110 
111 	ASSERT_NE(allocated_ptr, NULL);
112 	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);
113 
114 	ASSERT_EQ(rgn->size, size);
115 	ASSERT_EQ(rgn->base, max_addr - size - misalign);
116 	ASSERT_LT(rgn_end, max_addr);
117 
118 	ASSERT_EQ(memblock.reserved.cnt, 1);
119 	ASSERT_EQ(memblock.reserved.total_size, size);
120 
121 	test_pass_pop();
122 
123 	return 0;
124 }
125 
126 /*
127  * A simple test that tries to allocate a memory region, which spans over the
128  * min_addr and max_addr range:
129  *
130  *         +               +
131  *  |      +---------------+       |
132  *  |      |      rgn      |       |
133  *  +------+---------------+-------+
134  *         ^               ^
135  *         |               |
136  *         min_addr        max_addr
137  *
138  * Expect to allocate a region that starts at min_addr and ends at
139  * max_addr, given that min_addr is aligned.
140  */
141 static int alloc_try_nid_exact_address_generic_check(void)
142 {
143 	struct memblock_region *rgn = &memblock.reserved.regions[0];
144 	void *allocated_ptr = NULL;
145 	phys_addr_t size = SZ_1K;
146 	phys_addr_t min_addr;
147 	phys_addr_t max_addr;
148 	phys_addr_t rgn_end;
149 
150 	PREFIX_PUSH();
151 	setup_memblock();
152 
153 	min_addr = memblock_start_of_DRAM() + SMP_CACHE_BYTES;
154 	max_addr = min_addr + size;
155 
156 	allocated_ptr = run_memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
157 						   min_addr, max_addr,
158 						   NUMA_NO_NODE);
159 	rgn_end = rgn->base + rgn->size;
160 
161 	ASSERT_NE(allocated_ptr, NULL);
162 	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);
163 
164 	ASSERT_EQ(rgn->size, size);
165 	ASSERT_EQ(rgn->base, min_addr);
166 	ASSERT_EQ(rgn_end, max_addr);
167 
168 	ASSERT_EQ(memblock.reserved.cnt, 1);
169 	ASSERT_EQ(memblock.reserved.total_size, size);
170 
171 	test_pass_pop();
172 
173 	return 0;
174 }
175 
176 /*
177  * A test that tries to allocate a memory region, which can't fit into
178  * min_addr and max_addr range:
179  *
180  *           +          +     +
181  *  |        +----------+-----+    |
182  *  |        |   rgn    +     |    |
183  *  +--------+----------+-----+----+
184  *           ^          ^     ^
185  *           |          |     |
186  *           Aligned    |    max_addr
187  *           address    |
 *           boundary   min_addr
189  *
190  * Expect to drop the lower limit and allocate a memory region which
191  * ends at max_addr (if the address is aligned).
192  */
193 static int alloc_try_nid_top_down_narrow_range_check(void)
194 {
195 	struct memblock_region *rgn = &memblock.reserved.regions[0];
196 	void *allocated_ptr = NULL;
197 	phys_addr_t size = SZ_256;
198 	phys_addr_t min_addr;
199 	phys_addr_t max_addr;
200 
201 	PREFIX_PUSH();
202 	setup_memblock();
203 
204 	min_addr = memblock_start_of_DRAM() + SZ_512;
205 	max_addr = min_addr + SMP_CACHE_BYTES;
206 
207 	allocated_ptr = run_memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
208 						   min_addr, max_addr,
209 						   NUMA_NO_NODE);
210 
211 	ASSERT_NE(allocated_ptr, NULL);
212 	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);
213 
214 	ASSERT_EQ(rgn->size, size);
215 	ASSERT_EQ(rgn->base, max_addr - size);
216 
217 	ASSERT_EQ(memblock.reserved.cnt, 1);
218 	ASSERT_EQ(memblock.reserved.total_size, size);
219 
220 	test_pass_pop();
221 
222 	return 0;
223 }
224 
225 /*
226  * A test that tries to allocate a memory region, which can't fit into
227  * min_addr and max_addr range, with the latter being too close to the beginning
228  * of the available memory:
229  *
230  *   +-------------+
231  *   |     new     |
232  *   +-------------+
233  *         +       +
234  *         |       +              |
235  *         |       |              |
236  *         +-------+--------------+
237  *         ^       ^
238  *         |       |
239  *         |       max_addr
240  *         |
241  *         min_addr
242  *
243  * Expect no allocation to happen.
244  */
245 static int alloc_try_nid_low_max_generic_check(void)
246 {
247 	void *allocated_ptr = NULL;
248 	phys_addr_t size = SZ_1K;
249 	phys_addr_t min_addr;
250 	phys_addr_t max_addr;
251 
252 	PREFIX_PUSH();
253 	setup_memblock();
254 
255 	min_addr = memblock_start_of_DRAM();
256 	max_addr = min_addr + SMP_CACHE_BYTES;
257 
258 	allocated_ptr = run_memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
259 						   min_addr, max_addr,
260 						   NUMA_NO_NODE);
261 
262 	ASSERT_EQ(allocated_ptr, NULL);
263 
264 	test_pass_pop();
265 
266 	return 0;
267 }
268 
269 /*
 * A test that tries to allocate a memory region within min_addr and max_addr,
271  * with min_addr being so close that it's next to an allocated region:
272  *
273  *          +                        +
274  *  |       +--------+---------------|
275  *  |       |   r1   |      rgn      |
276  *  +-------+--------+---------------+
277  *          ^                        ^
278  *          |                        |
279  *          min_addr                 max_addr
280  *
281  * Expect a merge of both regions. Only the region size gets updated.
282  */
283 static int alloc_try_nid_min_reserved_generic_check(void)
284 {
285 	struct memblock_region *rgn = &memblock.reserved.regions[0];
286 	void *allocated_ptr = NULL;
287 	phys_addr_t r1_size = SZ_128;
288 	phys_addr_t r2_size = SZ_64;
289 	phys_addr_t total_size = r1_size + r2_size;
290 	phys_addr_t min_addr;
291 	phys_addr_t max_addr;
292 	phys_addr_t reserved_base;
293 
294 	PREFIX_PUSH();
295 	setup_memblock();
296 
297 	max_addr = memblock_end_of_DRAM();
298 	min_addr = max_addr - r2_size;
299 	reserved_base = min_addr - r1_size;
300 
301 	memblock_reserve(reserved_base, r1_size);
302 
303 	allocated_ptr = run_memblock_alloc_try_nid(r2_size, SMP_CACHE_BYTES,
304 						   min_addr, max_addr,
305 						   NUMA_NO_NODE);
306 
307 	ASSERT_NE(allocated_ptr, NULL);
308 	assert_mem_content(allocated_ptr, r2_size, alloc_nid_test_flags);
309 
310 	ASSERT_EQ(rgn->size, total_size);
311 	ASSERT_EQ(rgn->base, reserved_base);
312 
313 	ASSERT_EQ(memblock.reserved.cnt, 1);
314 	ASSERT_EQ(memblock.reserved.total_size, total_size);
315 
316 	test_pass_pop();
317 
318 	return 0;
319 }
320 
321 /*
322  * A test that tries to allocate a memory region within min_addr and max_addr,
323  * with max_addr being so close that it's next to an allocated region:
324  *
325  *             +             +
326  *  |          +-------------+--------|
327  *  |          |     rgn     |   r1   |
328  *  +----------+-------------+--------+
329  *             ^             ^
330  *             |             |
331  *             min_addr      max_addr
332  *
333  * Expect a merge of regions. Only the region size gets updated.
334  */
335 static int alloc_try_nid_max_reserved_generic_check(void)
336 {
337 	struct memblock_region *rgn = &memblock.reserved.regions[0];
338 	void *allocated_ptr = NULL;
339 	phys_addr_t r1_size = SZ_64;
340 	phys_addr_t r2_size = SZ_128;
341 	phys_addr_t total_size = r1_size + r2_size;
342 	phys_addr_t min_addr;
343 	phys_addr_t max_addr;
344 
345 	PREFIX_PUSH();
346 	setup_memblock();
347 
348 	max_addr = memblock_end_of_DRAM() - r1_size;
349 	min_addr = max_addr - r2_size;
350 
351 	memblock_reserve(max_addr, r1_size);
352 
353 	allocated_ptr = run_memblock_alloc_try_nid(r2_size, SMP_CACHE_BYTES,
354 						   min_addr, max_addr,
355 						   NUMA_NO_NODE);
356 
357 	ASSERT_NE(allocated_ptr, NULL);
358 	assert_mem_content(allocated_ptr, r2_size, alloc_nid_test_flags);
359 
360 	ASSERT_EQ(rgn->size, total_size);
361 	ASSERT_EQ(rgn->base, min_addr);
362 
363 	ASSERT_EQ(memblock.reserved.cnt, 1);
364 	ASSERT_EQ(memblock.reserved.total_size, total_size);
365 
366 	test_pass_pop();
367 
368 	return 0;
369 }
370 
371 /*
 * A test that tries to allocate memory within min_addr and max_addr range, when
373  * there are two reserved regions at the borders, with a gap big enough to fit
374  * a new region:
375  *
376  *                +           +
377  *  |    +--------+   +-------+------+  |
378  *  |    |   r2   |   |  rgn  |  r1  |  |
379  *  +----+--------+---+-------+------+--+
380  *                ^           ^
381  *                |           |
382  *                min_addr    max_addr
383  *
384  * Expect to merge the new region with r1. The second region does not get
385  * updated. The total size field gets updated.
386  */
387 
static int alloc_try_nid_top_down_reserved_with_space_check(void)
{
	/*
	 * After the allocation merges with r1, regions are kept sorted by
	 * base: r2 stays at index 0, the merged (new + r1) region is index 1.
	 */
	struct memblock_region *rgn1 = &memblock.reserved.regions[1];
	struct memblock_region *rgn2 = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	struct region r1, r2;
	phys_addr_t r3_size = SZ_64;
	phys_addr_t gap_size = SMP_CACHE_BYTES;
	phys_addr_t total_size;
	phys_addr_t max_addr;
	phys_addr_t min_addr;

	PREFIX_PUSH();
	setup_memblock();

	/* r1 sits one cache line below the end of DRAM */
	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
	r1.size = SMP_CACHE_BYTES;

	/* leave exactly r3_size + gap_size bytes free between r2 and r1 */
	r2.size = SZ_128;
	r2.base = r1.base - (r3_size + gap_size + r2.size);

	total_size = r1.size + r2.size + r3_size;
	min_addr = r2.base + r2.size;
	max_addr = r1.base;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = run_memblock_alloc_try_nid(r3_size, SMP_CACHE_BYTES,
						   min_addr, max_addr,
						   NUMA_NO_NODE);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r3_size, alloc_nid_test_flags);

	/* new region merged with r1: base extended downward by r3_size */
	ASSERT_EQ(rgn1->size, r1.size + r3_size);
	ASSERT_EQ(rgn1->base, max_addr - r3_size);

	/* r2 must be untouched */
	ASSERT_EQ(rgn2->size, r2.size);
	ASSERT_EQ(rgn2->base, r2.base);

	ASSERT_EQ(memblock.reserved.cnt, 2);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}
436 
437 /*
 * A test that tries to allocate memory within min_addr and max_addr range, when
439  * there are two reserved regions at the borders, with a gap of a size equal to
440  * the size of the new region:
441  *
442  *                 +        +
443  *  |     +--------+--------+--------+     |
444  *  |     |   r2   |   r3   |   r1   |     |
445  *  +-----+--------+--------+--------+-----+
446  *                 ^        ^
447  *                 |        |
448  *                 min_addr max_addr
449  *
450  * Expect to merge all of the regions into one. The region counter and total
451  * size fields get updated.
452  */
453 static int alloc_try_nid_reserved_full_merge_generic_check(void)
454 {
455 	struct memblock_region *rgn = &memblock.reserved.regions[0];
456 	void *allocated_ptr = NULL;
457 	struct region r1, r2;
458 	phys_addr_t r3_size = SZ_64;
459 	phys_addr_t total_size;
460 	phys_addr_t max_addr;
461 	phys_addr_t min_addr;
462 
463 	PREFIX_PUSH();
464 	setup_memblock();
465 
466 	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
467 	r1.size = SMP_CACHE_BYTES;
468 
469 	r2.size = SZ_128;
470 	r2.base = r1.base - (r3_size + r2.size);
471 
472 	total_size = r1.size + r2.size + r3_size;
473 	min_addr = r2.base + r2.size;
474 	max_addr = r1.base;
475 
476 	memblock_reserve(r1.base, r1.size);
477 	memblock_reserve(r2.base, r2.size);
478 
479 	allocated_ptr = run_memblock_alloc_try_nid(r3_size, SMP_CACHE_BYTES,
480 						   min_addr, max_addr,
481 						   NUMA_NO_NODE);
482 
483 	ASSERT_NE(allocated_ptr, NULL);
484 	assert_mem_content(allocated_ptr, r3_size, alloc_nid_test_flags);
485 
486 	ASSERT_EQ(rgn->size, total_size);
487 	ASSERT_EQ(rgn->base, r2.base);
488 
489 	ASSERT_EQ(memblock.reserved.cnt, 1);
490 	ASSERT_EQ(memblock.reserved.total_size, total_size);
491 
492 	test_pass_pop();
493 
494 	return 0;
495 }
496 
497 /*
 * A test that tries to allocate memory within min_addr and max_addr range, when
499  * there are two reserved regions at the borders, with a gap that can't fit
500  * a new region:
501  *
502  *                       +    +
503  *  |  +----------+------+    +------+   |
504  *  |  |    r3    |  r2  |    |  r1  |   |
505  *  +--+----------+------+----+------+---+
506  *                       ^    ^
507  *                       |    |
508  *                       |    max_addr
509  *                       |
510  *                       min_addr
511  *
512  * Expect to merge the new region with r2. The second region does not get
513  * updated. The total size counter gets updated.
514  */
static int alloc_try_nid_top_down_reserved_no_space_check(void)
{
	/*
	 * Regions stay sorted by base: r2 (lower) is index 0 and absorbs the
	 * new region below it; r1 (higher) is index 1 and stays untouched.
	 */
	struct memblock_region *rgn1 = &memblock.reserved.regions[1];
	struct memblock_region *rgn2 = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	struct region r1, r2;
	phys_addr_t r3_size = SZ_256;
	phys_addr_t gap_size = SMP_CACHE_BYTES;
	phys_addr_t total_size;
	phys_addr_t max_addr;
	phys_addr_t min_addr;

	PREFIX_PUSH();
	setup_memblock();

	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
	r1.size = SMP_CACHE_BYTES;

	/* only gap_size bytes between r2 and r1 - too small for r3_size */
	r2.size = SZ_128;
	r2.base = r1.base - (r2.size + gap_size);

	total_size = r1.size + r2.size + r3_size;
	min_addr = r2.base + r2.size;
	max_addr = r1.base;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = run_memblock_alloc_try_nid(r3_size, SMP_CACHE_BYTES,
						   min_addr, max_addr,
						   NUMA_NO_NODE);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r3_size, alloc_nid_test_flags);

	/* r1 is unchanged */
	ASSERT_EQ(rgn1->size, r1.size);
	ASSERT_EQ(rgn1->base, r1.base);

	/* the new region was placed below min_addr and merged into r2 */
	ASSERT_EQ(rgn2->size, r2.size + r3_size);
	ASSERT_EQ(rgn2->base, r2.base - r3_size);

	ASSERT_EQ(memblock.reserved.cnt, 2);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}
563 
564 /*
 * A test that tries to allocate memory within min_addr and max_addr range, but
566  * it's too narrow and everything else is reserved:
567  *
568  *            +-----------+
569  *            |    new    |
570  *            +-----------+
571  *                 +      +
572  *  |--------------+      +----------|
573  *  |      r2      |      |    r1    |
574  *  +--------------+------+----------+
575  *                 ^      ^
576  *                 |      |
577  *                 |      max_addr
578  *                 |
579  *                 min_addr
580  *
581  * Expect no allocation to happen.
582  */
583 
584 static int alloc_try_nid_reserved_all_generic_check(void)
585 {
586 	void *allocated_ptr = NULL;
587 	struct region r1, r2;
588 	phys_addr_t r3_size = SZ_256;
589 	phys_addr_t gap_size = SMP_CACHE_BYTES;
590 	phys_addr_t max_addr;
591 	phys_addr_t min_addr;
592 
593 	PREFIX_PUSH();
594 	setup_memblock();
595 
596 	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES;
597 	r1.size = SMP_CACHE_BYTES;
598 
599 	r2.size = MEM_SIZE - (r1.size + gap_size);
600 	r2.base = memblock_start_of_DRAM();
601 
602 	min_addr = r2.base + r2.size;
603 	max_addr = r1.base;
604 
605 	memblock_reserve(r1.base, r1.size);
606 	memblock_reserve(r2.base, r2.size);
607 
608 	allocated_ptr = run_memblock_alloc_try_nid(r3_size, SMP_CACHE_BYTES,
609 						   min_addr, max_addr,
610 						   NUMA_NO_NODE);
611 
612 	ASSERT_EQ(allocated_ptr, NULL);
613 
614 	test_pass_pop();
615 
616 	return 0;
617 }
618 
619 /*
620  * A test that tries to allocate a memory region, where max_addr is
621  * bigger than the end address of the available memory. Expect to allocate
 * a region that ends at the end of the available memory.
623  */
624 static int alloc_try_nid_top_down_cap_max_check(void)
625 {
626 	struct memblock_region *rgn = &memblock.reserved.regions[0];
627 	void *allocated_ptr = NULL;
628 	phys_addr_t size = SZ_256;
629 	phys_addr_t min_addr;
630 	phys_addr_t max_addr;
631 
632 	PREFIX_PUSH();
633 	setup_memblock();
634 
635 	min_addr = memblock_end_of_DRAM() - SZ_1K;
636 	max_addr = memblock_end_of_DRAM() + SZ_256;
637 
638 	allocated_ptr = run_memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
639 						   min_addr, max_addr,
640 						   NUMA_NO_NODE);
641 
642 	ASSERT_NE(allocated_ptr, NULL);
643 	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);
644 
645 	ASSERT_EQ(rgn->size, size);
646 	ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - size);
647 
648 	ASSERT_EQ(memblock.reserved.cnt, 1);
649 	ASSERT_EQ(memblock.reserved.total_size, size);
650 
651 	test_pass_pop();
652 
653 	return 0;
654 }
655 
656 /*
657  * A test that tries to allocate a memory region, where min_addr is
658  * smaller than the start address of the available memory. Expect to allocate
 * a region that ends at the end of the available memory.
660  */
661 static int alloc_try_nid_top_down_cap_min_check(void)
662 {
663 	struct memblock_region *rgn = &memblock.reserved.regions[0];
664 	void *allocated_ptr = NULL;
665 	phys_addr_t size = SZ_1K;
666 	phys_addr_t min_addr;
667 	phys_addr_t max_addr;
668 
669 	PREFIX_PUSH();
670 	setup_memblock();
671 
672 	min_addr = memblock_start_of_DRAM() - SZ_256;
673 	max_addr = memblock_end_of_DRAM();
674 
675 	allocated_ptr = run_memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
676 						   min_addr, max_addr,
677 						   NUMA_NO_NODE);
678 
679 	ASSERT_NE(allocated_ptr, NULL);
680 	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);
681 
682 	ASSERT_EQ(rgn->size, size);
683 	ASSERT_EQ(rgn->base, memblock_end_of_DRAM() - size);
684 
685 	ASSERT_EQ(memblock.reserved.cnt, 1);
686 	ASSERT_EQ(memblock.reserved.total_size, size);
687 
688 	test_pass_pop();
689 
690 	return 0;
691 }
692 
693 /*
694  * A simple test that tries to allocate a memory region within min_addr and
695  * max_addr range:
696  *
697  *        +                       +
698  *   |    +-----------+           |      |
699  *   |    |    rgn    |           |      |
700  *   +----+-----------+-----------+------+
701  *        ^                       ^
702  *        |                       |
703  *        min_addr                max_addr
704  *
705  * Expect to allocate a region that ends before max_addr.
706  */
707 static int alloc_try_nid_bottom_up_simple_check(void)
708 {
709 	struct memblock_region *rgn = &memblock.reserved.regions[0];
710 	void *allocated_ptr = NULL;
711 	phys_addr_t size = SZ_128;
712 	phys_addr_t min_addr;
713 	phys_addr_t max_addr;
714 	phys_addr_t rgn_end;
715 
716 	PREFIX_PUSH();
717 	setup_memblock();
718 
719 	min_addr = memblock_start_of_DRAM() + SMP_CACHE_BYTES * 2;
720 	max_addr = min_addr + SZ_512;
721 
722 	allocated_ptr = run_memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
723 						   min_addr, max_addr,
724 						   NUMA_NO_NODE);
725 	rgn_end = rgn->base + rgn->size;
726 
727 	ASSERT_NE(allocated_ptr, NULL);
728 	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);
729 
730 	ASSERT_EQ(rgn->size, size);
731 	ASSERT_EQ(rgn->base, min_addr);
732 	ASSERT_LT(rgn_end, max_addr);
733 
734 	ASSERT_EQ(memblock.reserved.cnt, 1);
735 	ASSERT_EQ(memblock.reserved.total_size, size);
736 
737 	test_pass_pop();
738 
739 	return 0;
740 }
741 
742 /*
743  * A simple test that tries to allocate a memory region within min_addr and
744  * max_addr range, where the start address is misaligned:
745  *
746  *        +                     +
747  *  |     +   +-----------+     +     |
748  *  |     |   |    rgn    |     |     |
749  *  +-----+---+-----------+-----+-----+
750  *        ^   ^----.            ^
751  *        |        |            |
 *     min_addr    |            max_addr
753  *                 |
754  *                 Aligned address
755  *                 boundary
756  *
757  * Expect to allocate an aligned region that ends before max_addr.
758  */
759 static int alloc_try_nid_bottom_up_start_misaligned_check(void)
760 {
761 	struct memblock_region *rgn = &memblock.reserved.regions[0];
762 	void *allocated_ptr = NULL;
763 	phys_addr_t size = SZ_128;
764 	phys_addr_t misalign = SZ_2;
765 	phys_addr_t min_addr;
766 	phys_addr_t max_addr;
767 	phys_addr_t rgn_end;
768 
769 	PREFIX_PUSH();
770 	setup_memblock();
771 
772 	min_addr = memblock_start_of_DRAM() + misalign;
773 	max_addr = min_addr + SZ_512;
774 
775 	allocated_ptr = run_memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
776 						   min_addr, max_addr,
777 						   NUMA_NO_NODE);
778 	rgn_end = rgn->base + rgn->size;
779 
780 	ASSERT_NE(allocated_ptr, NULL);
781 	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);
782 
783 	ASSERT_EQ(rgn->size, size);
784 	ASSERT_EQ(rgn->base, min_addr + (SMP_CACHE_BYTES - misalign));
785 	ASSERT_LT(rgn_end, max_addr);
786 
787 	ASSERT_EQ(memblock.reserved.cnt, 1);
788 	ASSERT_EQ(memblock.reserved.total_size, size);
789 
790 	test_pass_pop();
791 
792 	return 0;
793 }
794 
795 /*
796  * A test that tries to allocate a memory region, which can't fit into min_addr
797  * and max_addr range:
798  *
799  *                      +    +
800  *  |---------+         +    +      |
801  *  |   rgn   |         |    |      |
802  *  +---------+---------+----+------+
803  *                      ^    ^
804  *                      |    |
805  *                      |    max_addr
806  *                      |
 *                      min_addr
808  *
809  * Expect to drop the lower limit and allocate a memory region which
810  * starts at the beginning of the available memory.
811  */
812 static int alloc_try_nid_bottom_up_narrow_range_check(void)
813 {
814 	struct memblock_region *rgn = &memblock.reserved.regions[0];
815 	void *allocated_ptr = NULL;
816 	phys_addr_t size = SZ_256;
817 	phys_addr_t min_addr;
818 	phys_addr_t max_addr;
819 
820 	PREFIX_PUSH();
821 	setup_memblock();
822 
823 	min_addr = memblock_start_of_DRAM() + SZ_512;
824 	max_addr = min_addr + SMP_CACHE_BYTES;
825 
826 	allocated_ptr = run_memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
827 						   min_addr, max_addr,
828 						   NUMA_NO_NODE);
829 
830 	ASSERT_NE(allocated_ptr, NULL);
831 	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);
832 
833 	ASSERT_EQ(rgn->size, size);
834 	ASSERT_EQ(rgn->base, memblock_start_of_DRAM());
835 
836 	ASSERT_EQ(memblock.reserved.cnt, 1);
837 	ASSERT_EQ(memblock.reserved.total_size, size);
838 
839 	test_pass_pop();
840 
841 	return 0;
842 }
843 
844 /*
 * A test that tries to allocate memory within min_addr and max_addr range, when
846  * there are two reserved regions at the borders, with a gap big enough to fit
847  * a new region:
848  *
849  *                +           +
850  *  |    +--------+-------+   +------+  |
851  *  |    |   r2   |  rgn  |   |  r1  |  |
852  *  +----+--------+-------+---+------+--+
853  *                ^           ^
854  *                |           |
855  *                min_addr    max_addr
856  *
857  * Expect to merge the new region with r2. The second region does not get
858  * updated. The total size field gets updated.
859  */
860 
static int alloc_try_nid_bottom_up_reserved_with_space_check(void)
{
	/*
	 * Regions stay sorted by base: r2 (lower, index 0) absorbs the new
	 * region appended right after it; r1 (higher) remains index 1.
	 */
	struct memblock_region *rgn1 = &memblock.reserved.regions[1];
	struct memblock_region *rgn2 = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	struct region r1, r2;
	phys_addr_t r3_size = SZ_64;
	phys_addr_t gap_size = SMP_CACHE_BYTES;
	phys_addr_t total_size;
	phys_addr_t max_addr;
	phys_addr_t min_addr;

	PREFIX_PUSH();
	setup_memblock();

	/* r1 sits one cache line below the end of DRAM */
	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
	r1.size = SMP_CACHE_BYTES;

	/* leave exactly r3_size + gap_size bytes free between r2 and r1 */
	r2.size = SZ_128;
	r2.base = r1.base - (r3_size + gap_size + r2.size);

	total_size = r1.size + r2.size + r3_size;
	min_addr = r2.base + r2.size;
	max_addr = r1.base;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = run_memblock_alloc_try_nid(r3_size, SMP_CACHE_BYTES,
						   min_addr, max_addr,
						   NUMA_NO_NODE);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r3_size, alloc_nid_test_flags);

	/* r1 is unchanged */
	ASSERT_EQ(rgn1->size, r1.size);
	ASSERT_EQ(rgn1->base, max_addr);

	/* bottom-up: the new region merged with r2, growing it upward */
	ASSERT_EQ(rgn2->size, r2.size + r3_size);
	ASSERT_EQ(rgn2->base, r2.base);

	ASSERT_EQ(memblock.reserved.cnt, 2);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}
909 
910 /*
 * A test that tries to allocate memory within min_addr and max_addr range, when
912  * there are two reserved regions at the borders, with a gap of a size equal to
913  * the size of the new region:
914  *
915  *                         +   +
916  *  |----------+    +------+   +----+  |
917  *  |    r3    |    |  r2  |   | r1 |  |
918  *  +----------+----+------+---+----+--+
919  *                         ^   ^
920  *                         |   |
921  *                         |  max_addr
922  *                         |
923  *                         min_addr
924  *
925  * Expect to drop the lower limit and allocate memory at the beginning of the
926  * available memory. The region counter and total size fields get updated.
927  * Other regions are not modified.
928  */
929 
static int alloc_try_nid_bottom_up_reserved_no_space_check(void)
{
	/*
	 * Three disjoint regions remain after the allocation, sorted by base:
	 * the new region at DRAM start (index 0), then r2, then r1.
	 */
	struct memblock_region *rgn1 = &memblock.reserved.regions[2];
	struct memblock_region *rgn2 = &memblock.reserved.regions[1];
	struct memblock_region *rgn3 = &memblock.reserved.regions[0];
	void *allocated_ptr = NULL;
	struct region r1, r2;
	phys_addr_t r3_size = SZ_256;
	phys_addr_t gap_size = SMP_CACHE_BYTES;
	phys_addr_t total_size;
	phys_addr_t max_addr;
	phys_addr_t min_addr;

	PREFIX_PUSH();
	setup_memblock();

	r1.base = memblock_end_of_DRAM() - SMP_CACHE_BYTES * 2;
	r1.size = SMP_CACHE_BYTES;

	/* only gap_size bytes between r2 and r1 - too small for r3_size */
	r2.size = SZ_128;
	r2.base = r1.base - (r2.size + gap_size);

	total_size = r1.size + r2.size + r3_size;
	min_addr = r2.base + r2.size;
	max_addr = r1.base;

	memblock_reserve(r1.base, r1.size);
	memblock_reserve(r2.base, r2.size);

	allocated_ptr = run_memblock_alloc_try_nid(r3_size, SMP_CACHE_BYTES,
						   min_addr, max_addr,
						   NUMA_NO_NODE);

	ASSERT_NE(allocated_ptr, NULL);
	assert_mem_content(allocated_ptr, r3_size, alloc_nid_test_flags);

	/* the lower limit was dropped: r3 lands at the start of DRAM */
	ASSERT_EQ(rgn3->size, r3_size);
	ASSERT_EQ(rgn3->base, memblock_start_of_DRAM());

	/* r2 is unchanged */
	ASSERT_EQ(rgn2->size, r2.size);
	ASSERT_EQ(rgn2->base, r2.base);

	/* r1 is unchanged */
	ASSERT_EQ(rgn1->size, r1.size);
	ASSERT_EQ(rgn1->base, r1.base);

	ASSERT_EQ(memblock.reserved.cnt, 3);
	ASSERT_EQ(memblock.reserved.total_size, total_size);

	test_pass_pop();

	return 0;
}
982 
983 /*
984  * A test that tries to allocate a memory region, where max_addr is
985  * bigger than the end address of the available memory. Expect to allocate
986  * a region that starts at the min_addr.
987  */
988 static int alloc_try_nid_bottom_up_cap_max_check(void)
989 {
990 	struct memblock_region *rgn = &memblock.reserved.regions[0];
991 	void *allocated_ptr = NULL;
992 	phys_addr_t size = SZ_256;
993 	phys_addr_t min_addr;
994 	phys_addr_t max_addr;
995 
996 	PREFIX_PUSH();
997 	setup_memblock();
998 
999 	min_addr = memblock_start_of_DRAM() + SZ_1K;
1000 	max_addr = memblock_end_of_DRAM() + SZ_256;
1001 
1002 	allocated_ptr = run_memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
1003 						   min_addr, max_addr,
1004 						   NUMA_NO_NODE);
1005 
1006 	ASSERT_NE(allocated_ptr, NULL);
1007 	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);
1008 
1009 	ASSERT_EQ(rgn->size, size);
1010 	ASSERT_EQ(rgn->base, min_addr);
1011 
1012 	ASSERT_EQ(memblock.reserved.cnt, 1);
1013 	ASSERT_EQ(memblock.reserved.total_size, size);
1014 
1015 	test_pass_pop();
1016 
1017 	return 0;
1018 }
1019 
1020 /*
1021  * A test that tries to allocate a memory region, where min_addr is
1022  * smaller than the start address of the available memory. Expect to allocate
1023  * a region at the beginning of the available memory.
1024  */
1025 static int alloc_try_nid_bottom_up_cap_min_check(void)
1026 {
1027 	struct memblock_region *rgn = &memblock.reserved.regions[0];
1028 	void *allocated_ptr = NULL;
1029 	phys_addr_t size = SZ_1K;
1030 	phys_addr_t min_addr;
1031 	phys_addr_t max_addr;
1032 
1033 	PREFIX_PUSH();
1034 	setup_memblock();
1035 
1036 	min_addr = memblock_start_of_DRAM();
1037 	max_addr = memblock_end_of_DRAM() - SZ_256;
1038 
1039 	allocated_ptr = run_memblock_alloc_try_nid(size, SMP_CACHE_BYTES,
1040 						   min_addr, max_addr,
1041 						   NUMA_NO_NODE);
1042 
1043 	ASSERT_NE(allocated_ptr, NULL);
1044 	assert_mem_content(allocated_ptr, size, alloc_nid_test_flags);
1045 
1046 	ASSERT_EQ(rgn->size, size);
1047 	ASSERT_EQ(rgn->base, memblock_start_of_DRAM());
1048 
1049 	ASSERT_EQ(memblock.reserved.cnt, 1);
1050 	ASSERT_EQ(memblock.reserved.total_size, size);
1051 
1052 	test_pass_pop();
1053 
1054 	return 0;
1055 }
1056 
1057 /* Test case wrappers */
1058 static int alloc_try_nid_simple_check(void)
1059 {
1060 	test_print("\tRunning %s...\n", __func__);
1061 	memblock_set_bottom_up(false);
1062 	alloc_try_nid_top_down_simple_check();
1063 	memblock_set_bottom_up(true);
1064 	alloc_try_nid_bottom_up_simple_check();
1065 
1066 	return 0;
1067 }
1068 
1069 static int alloc_try_nid_misaligned_check(void)
1070 {
1071 	test_print("\tRunning %s...\n", __func__);
1072 	memblock_set_bottom_up(false);
1073 	alloc_try_nid_top_down_end_misaligned_check();
1074 	memblock_set_bottom_up(true);
1075 	alloc_try_nid_bottom_up_start_misaligned_check();
1076 
1077 	return 0;
1078 }
1079 
1080 static int alloc_try_nid_narrow_range_check(void)
1081 {
1082 	test_print("\tRunning %s...\n", __func__);
1083 	memblock_set_bottom_up(false);
1084 	alloc_try_nid_top_down_narrow_range_check();
1085 	memblock_set_bottom_up(true);
1086 	alloc_try_nid_bottom_up_narrow_range_check();
1087 
1088 	return 0;
1089 }
1090 
1091 static int alloc_try_nid_reserved_with_space_check(void)
1092 {
1093 	test_print("\tRunning %s...\n", __func__);
1094 	memblock_set_bottom_up(false);
1095 	alloc_try_nid_top_down_reserved_with_space_check();
1096 	memblock_set_bottom_up(true);
1097 	alloc_try_nid_bottom_up_reserved_with_space_check();
1098 
1099 	return 0;
1100 }
1101 
1102 static int alloc_try_nid_reserved_no_space_check(void)
1103 {
1104 	test_print("\tRunning %s...\n", __func__);
1105 	memblock_set_bottom_up(false);
1106 	alloc_try_nid_top_down_reserved_no_space_check();
1107 	memblock_set_bottom_up(true);
1108 	alloc_try_nid_bottom_up_reserved_no_space_check();
1109 
1110 	return 0;
1111 }
1112 
1113 static int alloc_try_nid_cap_max_check(void)
1114 {
1115 	test_print("\tRunning %s...\n", __func__);
1116 	memblock_set_bottom_up(false);
1117 	alloc_try_nid_top_down_cap_max_check();
1118 	memblock_set_bottom_up(true);
1119 	alloc_try_nid_bottom_up_cap_max_check();
1120 
1121 	return 0;
1122 }
1123 
1124 static int alloc_try_nid_cap_min_check(void)
1125 {
1126 	test_print("\tRunning %s...\n", __func__);
1127 	memblock_set_bottom_up(false);
1128 	alloc_try_nid_top_down_cap_min_check();
1129 	memblock_set_bottom_up(true);
1130 	alloc_try_nid_bottom_up_cap_min_check();
1131 
1132 	return 0;
1133 }
1134 
/*
 * Run the direction-independent min_addr/reserved-region check in both
 * allocation directions via the run_top_down()/run_bottom_up() helpers.
 */
static int alloc_try_nid_min_reserved_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_try_nid_min_reserved_generic_check);
	run_bottom_up(alloc_try_nid_min_reserved_generic_check);

	return 0;
}
1143 
/*
 * Run the direction-independent max_addr/reserved-region check in both
 * allocation directions via the run_top_down()/run_bottom_up() helpers.
 */
static int alloc_try_nid_max_reserved_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_try_nid_max_reserved_generic_check);
	run_bottom_up(alloc_try_nid_max_reserved_generic_check);

	return 0;
}
1152 
/*
 * Run the direction-independent exact-address check in both allocation
 * directions via the run_top_down()/run_bottom_up() helpers.
 */
static int alloc_try_nid_exact_address_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_try_nid_exact_address_generic_check);
	run_bottom_up(alloc_try_nid_exact_address_generic_check);

	return 0;
}
1161 
/*
 * Run the direction-independent full-merge-with-reserved-regions check
 * in both allocation directions via the run_top_down()/run_bottom_up()
 * helpers.
 */
static int alloc_try_nid_reserved_full_merge_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_try_nid_reserved_full_merge_generic_check);
	run_bottom_up(alloc_try_nid_reserved_full_merge_generic_check);

	return 0;
}
1170 
/*
 * Run the direction-independent all-memory-reserved check in both
 * allocation directions via the run_top_down()/run_bottom_up() helpers.
 */
static int alloc_try_nid_reserved_all_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_try_nid_reserved_all_generic_check);
	run_bottom_up(alloc_try_nid_reserved_all_generic_check);

	return 0;
}
1179 
/*
 * Run the direction-independent too-low-max_addr check in both
 * allocation directions via the run_top_down()/run_bottom_up() helpers.
 */
static int alloc_try_nid_low_max_check(void)
{
	test_print("\tRunning %s...\n", __func__);
	run_top_down(alloc_try_nid_low_max_generic_check);
	run_bottom_up(alloc_try_nid_low_max_generic_check);

	return 0;
}
1188 
/*
 * Run the whole memblock_alloc_try_nid() test suite once for the variant
 * selected by @flags (TEST_F_NONE for memblock_alloc_try_nid(),
 * TEST_F_RAW for memblock_alloc_try_nid_raw()).
 *
 * Always returns 0; individual checks report via the ASSERT_*/test_pass
 * machinery.
 */
static int memblock_alloc_nid_checks_internal(int flags)
{
	const char *func = get_memblock_alloc_try_nid_name(flags);

	/* Record the variant so run_memblock_alloc_try_nid() dispatches it. */
	alloc_nid_test_flags = flags;
	prefix_reset();
	prefix_push(func);
	test_print("Running %s tests...\n", func);

	/* NOTE(review): presumably sets up a fresh memblock state and the
	 * backing memory the tests allocate from — confirm in common.c. */
	reset_memblock_attributes();
	dummy_physical_memory_init();

	/* Checks with distinct top-down and bottom-up implementations. */
	alloc_try_nid_simple_check();
	alloc_try_nid_misaligned_check();
	alloc_try_nid_narrow_range_check();
	alloc_try_nid_reserved_with_space_check();
	alloc_try_nid_reserved_no_space_check();
	alloc_try_nid_cap_max_check();
	alloc_try_nid_cap_min_check();

	/* Direction-independent (generic) checks, run in both directions. */
	alloc_try_nid_min_reserved_check();
	alloc_try_nid_max_reserved_check();
	alloc_try_nid_exact_address_check();
	alloc_try_nid_reserved_full_merge_check();
	alloc_try_nid_reserved_all_check();
	alloc_try_nid_low_max_check();

	dummy_physical_memory_cleanup();

	prefix_pop();

	return 0;
}
1222 
/*
 * Public entry point: run the full suite twice, once for
 * memblock_alloc_try_nid() and once for memblock_alloc_try_nid_raw().
 * Always returns 0.
 */
int memblock_alloc_nid_checks(void)
{
	memblock_alloc_nid_checks_internal(TEST_F_NONE);
	memblock_alloc_nid_checks_internal(TEST_F_RAW);

	return 0;
}
1230