1 /* $NetBSD: t_uvm_physseg.c,v 1.2 2016/12/22 08:15:20 cherry Exp $ */
2
3 /*-
4 * Copyright (c) 2015, 2016 The NetBSD Foundation, Inc.
5 * All rights reserved.
6 *
7 * This code is derived from software contributed to The NetBSD Foundation
8 * by Santhosh N. Raju <santhosh.raju@gmail.com> and
9 * by Cherry G. Mathew
10 *
11 * Redistribution and use in source and binary forms, with or without
12 * modification, are permitted provided that the following conditions
13 * are met:
14 * 1. Redistributions of source code must retain the above copyright
15 * notice, this list of conditions and the following disclaimer.
16 * 2. Redistributions in binary form must reproduce the above copyright
17 * notice, this list of conditions and the following disclaimer in the
18 * documentation and/or other materials provided with the distribution.
19 *
20 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
21 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
22 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
23 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
24 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
25 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
26 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
27 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
28 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
29 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
30 * POSSIBILITY OF SUCH DAMAGE.
31 */
32
33 #include <sys/cdefs.h>
34 __RCSID("$NetBSD: t_uvm_physseg.c,v 1.2 2016/12/22 08:15:20 cherry Exp $");
35
36 /*
37 * If this line is commented out, the tests related to
38 * uvm_physseg_get_pmseg() won't run.
39 *
40 * Have a look at machine/uvm_physseg.h for more details.
41 */
42 #define __HAVE_PMAP_PHYSSEG
43
44 /*
45 * This is a dummy struct used for testing purposes.
46 *
47 * In reality this struct would exist in the MD part of the code, residing
48 * in machine/vmparam.h
49 */
50
51 #ifdef __HAVE_PMAP_PHYSSEG
52 struct pmap_physseg {
53 int dummy_variable; /* Dummy variable used for testing */
54 };
55 #endif
56
57 /* Testing API - assumes userland */
58 /* Provide Kernel API equivalents */
59 #include <assert.h>
60 #include <errno.h>
61 #include <stdbool.h>
62 #include <string.h> /* memset(3) et al. */
63 #include <stdio.h> /* printf(3) */
64 #include <stdlib.h> /* malloc(3) */
65 #include <stdarg.h>
66 #include <stddef.h>
67
68 #define PRIxPADDR "lx"
69 #define PRIxPSIZE "lx"
70 #define PRIuPSIZE "lu"
71 #define PRIxVADDR "lx"
72 #define PRIxVSIZE "lx"
73 #define PRIuVSIZE "lu"
74
75 #define UVM_HOTPLUG /* Enable hotplug with rbtree. */
76 #define PMAP_STEAL_MEMORY
77 #define DEBUG /* Enable debug functionality. */
78
79 typedef unsigned long vaddr_t;
80 typedef unsigned long paddr_t;
81 typedef unsigned long psize_t;
82 typedef unsigned long vsize_t;
83
84 #include <uvm/uvm_physseg.h>
85 #include <uvm/uvm_page.h>
86
87 #ifndef DIAGNOSTIC
88 #define KASSERTMSG(e, msg, ...) /* NOTHING */
89 #define KASSERT(e) /* NOTHING */
90 #else
91 #define KASSERT(a) assert(a)
92 #define KASSERTMSG(exp, ...) do { printf(__VA_ARGS__); assert((exp)); } while (/*CONSTCOND*/0)
93 #endif
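/*
 * Note: several test cases below use atf_tc_expect_signal(SIGABRT, ...) to
 * catch KASSERT()/panic() failures. This assumes KASSERT() expands to
 * assert(3), i.e. that DIAGNOSTIC is defined (normally supplied by the
 * build flags); with the no-op definitions above no abort would be raised.
 */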
94
95 #define VM_PHYSSEG_STRAT VM_PSTRAT_BSEARCH
96
97 #define VM_NFREELIST 4
98 #define VM_FREELIST_DEFAULT 0
99 #define VM_FREELIST_FIRST16 3
100 #define VM_FREELIST_FIRST1G 2
101 #define VM_FREELIST_FIRST4G 1
102
103 /*
104 * Used in tests when Array implementation is tested
105 */
106 #if !defined(VM_PHYSSEG_MAX)
107 #define VM_PHYSSEG_MAX 1
108 #endif
109
110 #define PAGE_SHIFT 12
111 #define PAGE_SIZE (1 << PAGE_SHIFT)
112 #define PAGE_MASK (PAGE_SIZE - 1)
113 #define atop(x) (((paddr_t)(x)) >> PAGE_SHIFT)
114 #define ptoa(x) (((paddr_t)(x)) << PAGE_SHIFT)
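/*
 * Worked example (illustrative only): with PAGE_SHIFT == 12,
 *
 *	atop(ONE_MEGABYTE) == (1024 * 1024) >> 12 == 256
 *	ptoa(256)          == 256 << 12           == ONE_MEGABYTE
 *
 * i.e. atop()/ptoa() convert between byte addresses and page frame numbers.
 */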
115
116 #define mutex_enter(l)
117 #define mutex_exit(l)
118
119 psize_t physmem;
120
121 struct uvmexp uvmexp; /* decl */
122
123 /*
124 * uvm structure borrowed from uvm.h
125 *
126 * Remember that this is a dummy structure used within the ATF tests and
127 * that it carries only the necessary fields from the original uvm struct.
128 * See uvm/uvm.h for the full struct.
129 */
130
131 struct uvm {
132 /* vm_page related parameters */
133
134 bool page_init_done; /* TRUE if uvm_page_init() finished */
135 } uvm;
136
137 #include <sys/kmem.h>
138
139 void *
140 kmem_alloc(size_t size, km_flag_t flags)
141 {
142 return malloc(size);
143 }
144
145 void *
146 kmem_zalloc(size_t size, km_flag_t flags)
147 {
148 void *ptr;
149 ptr = malloc(size);
150
151 memset(ptr, 0, size);
152
153 return ptr;
154 }
155
156 void
157 kmem_free(void *mem, size_t size)
158 {
159 free(mem);
160 }
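/*
 * Note: these userland stand-ins ignore the km_flag_t argument
 * (KM_SLEEP/KM_NOSLEEP semantics are not emulated) and do not check the
 * malloc(3) return value; the tests assume allocation never fails here.
 */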
161
162 static void
163 panic(const char *fmt, ...)
164 {
165 va_list ap;
166
167 va_start(ap, fmt);
168 vprintf(fmt, ap);
169 printf("\n");
170 va_end(ap);
171 KASSERT(false);
172
173 /*NOTREACHED*/
174 }
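/*
 * Since panic() above reduces to KASSERT(false), a "kernel panic" inside
 * uvm_physseg.c surfaces as a SIGABRT from assert(3) (given DIAGNOSTIC),
 * which is what the atf_tc_expect_signal(SIGABRT, ...) calls below anticipate.
 */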
175
176 static void
177 uvm_pagefree(struct vm_page *pg)
178 {
179 return;
180 }
181
182 #if defined(UVM_HOTPLUG)
183 static void
184 uvmpdpol_reinit(void)
185 {
186 return;
187 }
188 #endif /* UVM_HOTPLUG */
189
190 /* end - Provide Kernel API equivalents */
191
192
193 #include "uvm/uvm_physseg.c"
194
195 #include <atf-c.h>
196
197 #define SIXTYFOUR_KILO (64 * 1024)
198 #define ONETWENTYEIGHT_KILO (128 * 1024)
199 #define TWOFIFTYSIX_KILO (256 * 1024)
200 #define FIVEONETWO_KILO (512 * 1024)
201 #define ONE_MEGABYTE (1024 * 1024)
202 #define TWO_MEGABYTE (2 * 1024 * 1024)
203
204 /* Sample Page Frame Numbers */
205 #define VALID_START_PFN_1 atop(0)
206 #define VALID_END_PFN_1 atop(ONE_MEGABYTE)
207 #define VALID_AVAIL_START_PFN_1 atop(0)
208 #define VALID_AVAIL_END_PFN_1 atop(ONE_MEGABYTE)
209
210 #define VALID_START_PFN_2 atop(ONE_MEGABYTE + 1)
211 #define VALID_END_PFN_2 atop(ONE_MEGABYTE * 2)
212 #define VALID_AVAIL_START_PFN_2 atop(ONE_MEGABYTE + 1)
213 #define VALID_AVAIL_END_PFN_2 atop(ONE_MEGABYTE * 2)
214
215 #define VALID_START_PFN_3 atop((ONE_MEGABYTE * 2) + 1)
216 #define VALID_END_PFN_3 atop(ONE_MEGABYTE * 3)
217 #define VALID_AVAIL_START_PFN_3 atop((ONE_MEGABYTE * 2) + 1)
218 #define VALID_AVAIL_END_PFN_3 atop(ONE_MEGABYTE * 3)
219
220 #define VALID_START_PFN_4 atop((ONE_MEGABYTE * 3) + 1)
221 #define VALID_END_PFN_4 atop(ONE_MEGABYTE * 4)
222 #define VALID_AVAIL_START_PFN_4 atop((ONE_MEGABYTE * 3) + 1)
223 #define VALID_AVAIL_END_PFN_4 atop(ONE_MEGABYTE * 4)
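/*
 * With the 4 KiB PAGE_SIZE above, these work out to four adjacent 256-page
 * segments (illustrative summary, not part of the original definitions):
 *
 *	segment 1: PFNs [  0,  256)	0 .. 1 MB
 *	segment 2: PFNs [256,  512)	1 MB .. 2 MB
 *	segment 3: PFNs [512,  768)	2 MB .. 3 MB
 *	segment 4: PFNs [768, 1024)	3 MB .. 4 MB
 */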
224
225 /*
226 * Total number of pages (of 4K size each) should be 256 for 1MB of memory.
227 */
228 #define PAGE_COUNT_1M 256
229
230 /*
231 * A debug function to print the contents of upm.
232 */
233 static inline void
234 uvm_physseg_dump_seg(uvm_physseg_t upm)
235 {
236 #if defined(DEBUG)
237 printf("%s: seg->start == %ld\n", __func__,
238 uvm_physseg_get_start(upm));
239 printf("%s: seg->end == %ld\n", __func__,
240 uvm_physseg_get_end(upm));
241 printf("%s: seg->avail_start == %ld\n", __func__,
242 uvm_physseg_get_avail_start(upm));
243 printf("%s: seg->avail_end == %ld\n", __func__,
244 uvm_physseg_get_avail_end(upm));
245
246 printf("====\n\n");
247 #else
248 return;
249 #endif /* DEBUG */
250 }
251
252 /*
253 * Private accessor that gets the value of uvm_physseg_graph.nentries
254 */
255 static int
256 uvm_physseg_get_entries(void)
257 {
258 #if defined(UVM_HOTPLUG)
259 return uvm_physseg_graph.nentries;
260 #else
261 return vm_nphysmem;
262 #endif /* UVM_HOTPLUG */
263 }
264
265 #if !defined(UVM_HOTPLUG)
266 static void *
267 uvm_physseg_alloc(size_t sz)
268 {
269 return &vm_physmem[vm_nphysseg++];
270 }
271 #endif
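/*
 * Array (non-UVM_HOTPLUG) stand-in used by a few tests below: it simply
 * hands out the next static vm_physmem[] slot and ignores sz.
 */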
272
273 /*
274 * Test Fixture SetUp().
275 */
276 static void
277 setup(void)
278 {
279 /* Prerequisites for running certain calls in uvm_physseg */
280 uvmexp.pagesize = PAGE_SIZE;
281 uvmexp.npages = 0;
282 uvm.page_init_done = false;
283 uvm_physseg_init();
284 }
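/*
 * Each ATF test case runs in its own process, so every test body calls
 * setup() itself to recreate the boot-time state; nothing carries over
 * between cases. This per-process isolation is also what makes the
 * atf_tc_expect_signal() based tests possible.
 */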
285
286
287 /* <---- Tests for Internal functions ----> */
288 #if defined(UVM_HOTPLUG)
289 ATF_TC(uvm_physseg_alloc_atboot_mismatch);
290 ATF_TC_HEAD(uvm_physseg_alloc_atboot_mismatch, tc)
291 {
292 atf_tc_set_md_var(tc, "descr", "boot time uvm_physseg_alloc() sanity"
293 " size mismatch alloc() test.");
294 }
295
296 ATF_TC_BODY(uvm_physseg_alloc_atboot_mismatch, tc)
297 {
298 uvm.page_init_done = false;
299
300 atf_tc_expect_signal(SIGABRT, "size mismatch alloc()");
301
302 uvm_physseg_alloc(sizeof(struct uvm_physseg) - 1);
303 }
304
305 ATF_TC(uvm_physseg_alloc_atboot_overrun);
306 ATF_TC_HEAD(uvm_physseg_alloc_atboot_overrun, tc)
307 {
308 atf_tc_set_md_var(tc, "descr", "boot time uvm_physseg_alloc() sanity"
309 " array overrun alloc() test.");
310 }
311
312 ATF_TC_BODY(uvm_physseg_alloc_atboot_overrun, tc)
313 {
314 uvm.page_init_done = false;
315
316 atf_tc_expect_signal(SIGABRT, "array overrun alloc()");
317
318 uvm_physseg_alloc((VM_PHYSSEG_MAX + 1) * sizeof(struct uvm_physseg));
319
320 }
321
322 ATF_TC(uvm_physseg_alloc_sanity);
323 ATF_TC_HEAD(uvm_physseg_alloc_sanity, tc)
324 {
325 atf_tc_set_md_var(tc, "descr", "further uvm_physseg_alloc() sanity checks");
326 }
327
328 ATF_TC_BODY(uvm_physseg_alloc_sanity, tc)
329 {
330
331 /* At boot time */
332 uvm.page_init_done = false;
333
334 /* Correct alloc */
335 ATF_REQUIRE(uvm_physseg_alloc(VM_PHYSSEG_MAX * sizeof(struct uvm_physseg)));
336
337 /* Retry static alloc()s as dynamic - we expect them to pass */
338 uvm.page_init_done = true;
339 ATF_REQUIRE(uvm_physseg_alloc(sizeof(struct uvm_physseg) - 1));
340 ATF_REQUIRE(uvm_physseg_alloc(2 * VM_PHYSSEG_MAX * sizeof(struct uvm_physseg)));
341 }
342
343 ATF_TC(uvm_physseg_free_atboot_mismatch);
344 ATF_TC_HEAD(uvm_physseg_free_atboot_mismatch, tc)
345 {
346 atf_tc_set_md_var(tc, "descr", "boot time uvm_physseg_free() sanity"
347 " size mismatch free() test.");
348 }
349
350 ATF_TC_BODY(uvm_physseg_free_atboot_mismatch, tc)
351 {
352 uvm.page_init_done = false;
353
354 atf_tc_expect_signal(SIGABRT, "size mismatch free()");
355
356 uvm_physseg_free(&uvm_physseg[0], sizeof(struct uvm_physseg) - 1);
357 }
358
359 ATF_TC(uvm_physseg_free_sanity);
360 ATF_TC_HEAD(uvm_physseg_free_sanity, tc)
361 {
362 atf_tc_set_md_var(tc, "descr", "further uvm_physseg_free() sanity checks");
363 }
364
365 ATF_TC_BODY(uvm_physseg_free_sanity, tc)
366 {
367
368 /* At boot time */
369 uvm.page_init_done = false;
370
371 struct uvm_physseg *seg;
372
373 #if VM_PHYSSEG_MAX > 1
374 /*
375 * Note: free()ing the entire array is considered to be an
376 * error. Thus VM_PHYSSEG_MAX - 1.
377 */
378
379 seg = uvm_physseg_alloc((VM_PHYSSEG_MAX - 1) * sizeof(*seg));
380 uvm_physseg_free(seg, (VM_PHYSSEG_MAX - 1) * sizeof(struct uvm_physseg));
381 #endif
382
383 /* Retry static alloc()s as dynamic - we expect them to pass */
384 uvm.page_init_done = true;
385
386 seg = uvm_physseg_alloc(sizeof(struct uvm_physseg) - 1);
387 uvm_physseg_free(seg, sizeof(struct uvm_physseg) - 1);
388
389 seg = uvm_physseg_alloc(2 * VM_PHYSSEG_MAX * sizeof(struct uvm_physseg));
390
391 uvm_physseg_free(seg, 2 * VM_PHYSSEG_MAX * sizeof(struct uvm_physseg));
392 }
393
394 #if VM_PHYSSEG_MAX > 1
395 ATF_TC(uvm_physseg_atboot_free_leak);
396 ATF_TC_HEAD(uvm_physseg_atboot_free_leak, tc)
397 {
398 atf_tc_set_md_var(tc, "descr",
399 "does free() leak at boot?\n"
400 "This test needs VM_PHYSSEG_MAX > 1");
401 }
402
403 ATF_TC_BODY(uvm_physseg_atboot_free_leak, tc)
404 {
405
406 /* At boot time */
407 uvm.page_init_done = false;
408
409 /* alloc to array size */
410 struct uvm_physseg *seg;
411 seg = uvm_physseg_alloc(VM_PHYSSEG_MAX * sizeof(*seg));
412
413 uvm_physseg_free(seg, sizeof(*seg));
414
415 atf_tc_expect_signal(SIGABRT, "array overrun on alloc() after leak");
416
417 ATF_REQUIRE(uvm_physseg_alloc(sizeof(struct uvm_physseg)));
418 }
419 #endif /* VM_PHYSSEG_MAX */
420 #endif /* UVM_HOTPLUG */
421
422 /*
423 * Note: This function replicates verbatim what happens in
424 * uvm_page.c:uvm_page_init().
425 *
426 * Please track any changes that happen there.
427 */
428 static void
429 uvm_page_init_fake(struct vm_page *pagearray, psize_t pagecount)
430 {
431 uvm_physseg_t bank;
432 size_t n;
433
434 for (bank = uvm_physseg_get_first(),
435 uvm_physseg_seg_chomp_slab(bank, pagearray, pagecount);
436 uvm_physseg_valid_p(bank);
437 bank = uvm_physseg_get_next(bank)) {
438
439 n = uvm_physseg_get_end(bank) - uvm_physseg_get_start(bank);
440 uvm_physseg_seg_alloc_from_slab(bank, n);
441 uvm_physseg_init_seg(bank, pagearray);
442
443 /* set up page array pointers */
444 pagearray += n;
445 pagecount -= n;
446 }
447
448 uvm.page_init_done = true;
449 }
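/*
 * A minimal usage sketch for the tests below (illustrative only; the real
 * bodies add VM_PHYSSEG_MAX conditionals and error checks):
 *
 *	struct vm_page *slab = malloc(sizeof(struct vm_page) * npages);
 *	setup();					// boot-time state
 *	uvm_physseg_plug(start_pfn, npages, &upm);	// boot-time plug
 *	uvm_page_init_fake(slab, npages);		// "finish" uvm_page_init()
 *	// From here on uvm.page_init_done == true: further plugs are
 *	// treated as hotplug, and uvm_page_physload() would panic.
 */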
450
451 ATF_TC(uvm_physseg_plug);
452 ATF_TC_HEAD(uvm_physseg_plug, tc)
453 {
454 atf_tc_set_md_var(tc, "descr",
455 "Test plug functionality.");
456 }
457 /* Note: We only do the second boot time plug if VM_PHYSSEG_MAX > 1 */
458 ATF_TC_BODY(uvm_physseg_plug, tc)
459 {
460 int nentries = 0; /* Count of entries via plug done so far */
461 uvm_physseg_t upm1;
462 #if VM_PHYSSEG_MAX > 2
463 uvm_physseg_t upm2;
464 #endif
465
466 #if VM_PHYSSEG_MAX > 1
467 uvm_physseg_t upm3;
468 #endif
469 uvm_physseg_t upm4;
470 psize_t npages1 = (VALID_END_PFN_1 - VALID_START_PFN_1);
471 psize_t npages2 = (VALID_END_PFN_2 - VALID_START_PFN_2);
472 psize_t npages3 = (VALID_END_PFN_3 - VALID_START_PFN_3);
473 psize_t npages4 = (VALID_END_PFN_4 - VALID_START_PFN_4);
474 struct vm_page *pgs, *slab = malloc(sizeof(struct vm_page) * (npages1
475 #if VM_PHYSSEG_MAX > 2
476 + npages2
477 #endif
478 + npages3));
479
480 /* Fake early boot */
481
482 setup();
483
484 /* Vanilla plug x 2 */
485 ATF_REQUIRE_EQ(uvm_physseg_plug(VALID_START_PFN_1, npages1, &upm1), true);
486 ATF_REQUIRE_EQ(++nentries, uvm_physseg_get_entries());
487 ATF_REQUIRE_EQ(0, uvmexp.npages);
488
489 #if VM_PHYSSEG_MAX > 2
490 ATF_REQUIRE_EQ(uvm_physseg_plug(VALID_START_PFN_2, npages2, &upm2), true);
491 ATF_REQUIRE_EQ(++nentries, uvm_physseg_get_entries());
492 ATF_REQUIRE_EQ(0, uvmexp.npages);
493 #endif
494 /* Post boot: Fake all segments and pages accounted for. */
495 uvm_page_init_fake(slab, npages1 + npages2 + npages3);
496
497 ATF_CHECK_EQ(npages1
498 #if VM_PHYSSEG_MAX > 2
499 + npages2
500 #endif
501 , uvmexp.npages);
502 #if VM_PHYSSEG_MAX > 1
503 /* Scavenge plug - goes into the same slab */
504 ATF_REQUIRE_EQ(uvm_physseg_plug(VALID_START_PFN_3, npages3, &upm3), true);
505 ATF_REQUIRE_EQ(++nentries, uvm_physseg_get_entries());
506 ATF_REQUIRE_EQ(npages1
507 #if VM_PHYSSEG_MAX > 2
508 + npages2
509 #endif
510 + npages3, uvmexp.npages);
511
512 /* Scavenge plug should fit right in the slab */
513 pgs = uvm_physseg_get_pg(upm3, 0);
514 ATF_REQUIRE(pgs > slab && pgs < (slab + npages1 + npages2 + npages3));
515 #endif
516 /* Hot plug - goes into a brand new slab */
517 ATF_REQUIRE_EQ(uvm_physseg_plug(VALID_START_PFN_4, npages4, &upm4), true);
518 /* The hot plug slab should have nothing to do with the original slab */
519 pgs = uvm_physseg_get_pg(upm4, 0);
520 ATF_REQUIRE(pgs < slab || pgs > (slab + npages1
521 #if VM_PHYSSEG_MAX > 2
522 + npages2
523 #endif
524 + npages3));
525
526 }
527 ATF_TC(uvm_physseg_unplug);
528 ATF_TC_HEAD(uvm_physseg_unplug, tc)
529 {
530 atf_tc_set_md_var(tc, "descr",
531 "Test unplug functionality.");
532 }
533 ATF_TC_BODY(uvm_physseg_unplug, tc)
534 {
535 paddr_t pa = 0;
536
537 psize_t npages1 = (VALID_END_PFN_1 - VALID_START_PFN_1);
538 psize_t npages2 = (VALID_END_PFN_2 - VALID_START_PFN_2);
539 psize_t npages3 = (VALID_END_PFN_3 - VALID_START_PFN_3);
540
541 struct vm_page *slab = malloc(sizeof(struct vm_page) * (npages1 + npages2 + npages3));
542
543 uvm_physseg_t upm;
544
545 /* Boot time */
546 setup();
547
548 /* We start with zero segments */
549 ATF_REQUIRE_EQ(true, uvm_physseg_plug(atop(0), atop(ONE_MEGABYTE), NULL));
550 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
551 /* Do we have an arbitrary offset in there ? */
552 uvm_physseg_find(atop(TWOFIFTYSIX_KILO), &pa);
553 ATF_REQUIRE_EQ(pa, atop(TWOFIFTYSIX_KILO));
554 ATF_REQUIRE_EQ(0, uvmexp.npages); /* Boot time sanity */
555
556 #if VM_PHYSSEG_MAX == 1
557 /*
558 * This is the curious case at boot time, of having one
559 * extent(9) static entry per segment, which means that a
560 * fragmenting unplug will fail.
561 */
562 atf_tc_expect_signal(SIGABRT, "fragmenting unplug for single segment");
563
564 /*
565 * In order to test the fragmenting cases, please set
566 * VM_PHYSSEG_MAX > 1
567 */
568 #endif
569 /* Now let's unplug from the middle */
570 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(atop(TWOFIFTYSIX_KILO), atop(FIVEONETWO_KILO)));
571 /* verify that a gap exists at TWOFIFTYSIX_KILO */
572 pa = 0; /* reset */
573 uvm_physseg_find(atop(TWOFIFTYSIX_KILO), &pa);
574 ATF_REQUIRE_EQ(pa, 0);
575
576 /* Post boot: Fake all segments and pages accounted for. */
577 uvm_page_init_fake(slab, npages1 + npages2 + npages3);
578 /* Account for the unplug */
579 ATF_CHECK_EQ(atop(FIVEONETWO_KILO), uvmexp.npages);
580
581 /* Original entry should fragment into two */
582 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
583
584 upm = uvm_physseg_find(atop(TWOFIFTYSIX_KILO + FIVEONETWO_KILO), NULL);
585
586 ATF_REQUIRE(uvm_physseg_valid_p(upm));
587
588 /* Now unplug the tail fragment - should swallow the complete entry */
589 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(atop(TWOFIFTYSIX_KILO + FIVEONETWO_KILO), atop(TWOFIFTYSIX_KILO)));
590
591 /* The "swallow" above should have invalidated the handle */
592 ATF_REQUIRE_EQ(false, uvm_physseg_valid_p(upm));
593
594 /* Only the first one is left now */
595 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
596
597 /* Unplug from the back */
598 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(atop(ONETWENTYEIGHT_KILO), atop(ONETWENTYEIGHT_KILO)));
599 /* Shouldn't change the number of segments */
600 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
601
602 /* Unplug from the front */
603 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(0, atop(SIXTYFOUR_KILO)));
604 /* Shouldn't change the number of segments */
605 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
606
607 /* Unplugging the final fragment should fail */
608 atf_tc_expect_signal(SIGABRT, "Unplugging the last segment");
609 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(atop(SIXTYFOUR_KILO), atop(SIXTYFOUR_KILO)));
610 }
611
612
613 /* <---- end Tests for Internal functions ----> */
614
615 /* Tests for functions exported via uvm_physseg.h */
616 ATF_TC(uvm_physseg_init);
617 ATF_TC_HEAD(uvm_physseg_init, tc)
618 {
619 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_init() call\
620 initializes the vm_physmem struct which holds the rb_tree.");
621 }
622 ATF_TC_BODY(uvm_physseg_init, tc)
623 {
624 uvm_physseg_init();
625
626 ATF_REQUIRE_EQ(0, uvm_physseg_get_entries());
627 }
628
629 ATF_TC(uvm_page_physload_preload);
630 ATF_TC_HEAD(uvm_page_physload_preload, tc)
631 {
632 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physload() \
633 call works without a panic() in a preload scenario.");
634 }
635 ATF_TC_BODY(uvm_page_physload_preload, tc)
636 {
637 uvm_physseg_t upm;
638
639 setup();
640
641 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
642 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
643
644 /* Should return a valid handle */
645 ATF_REQUIRE(uvm_physseg_valid_p(upm));
646
647 /* No pages should be allocated yet */
648 ATF_REQUIRE_EQ(0, uvmexp.npages);
649
650 /* After the first call one segment should exist */
651 ATF_CHECK_EQ(1, uvm_physseg_get_entries());
652
653 /* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
654 #if VM_PHYSSEG_MAX > 1
655 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
656 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
657
658 /* Should return a valid handle */
659 ATF_REQUIRE(uvm_physseg_valid_p(upm));
660
661 ATF_REQUIRE_EQ(0, uvmexp.npages);
662
663 /* After the second call two segments should exist */
664 ATF_CHECK_EQ(2, uvm_physseg_get_entries());
665 #endif
666 }
667
668 ATF_TC(uvm_page_physload_postboot);
669 ATF_TC_HEAD(uvm_page_physload_postboot, tc)
670 {
671 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physload() \
672 panic()s in a post boot scenario.");
673 }
674 ATF_TC_BODY(uvm_page_physload_postboot, tc)
675 {
676 uvm_physseg_t upm;
677
678 psize_t npages1 = (VALID_END_PFN_1 - VALID_START_PFN_1);
679 psize_t npages2 = (VALID_END_PFN_2 - VALID_START_PFN_2);
680
681 struct vm_page *slab = malloc(sizeof(struct vm_page) * (npages1 + npages2));
682
683 setup();
684
685 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
686 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
687
688 /* Should return a valid handle */
689 ATF_REQUIRE(uvm_physseg_valid_p(upm));
690
691 /* No pages should be allocated yet */
692 ATF_REQUIRE_EQ(0, uvmexp.npages);
693
694 /* After the first call one segment should exist */
695 ATF_CHECK_EQ(1, uvm_physseg_get_entries());
696
697 /* Post boot: Fake all segments and pages accounted for. */
698 uvm_page_init_fake(slab, npages1 + npages2);
699
700 atf_tc_expect_signal(SIGABRT,
701 "uvm_page_physload() called post boot");
702
703 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
704 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
705
706 /* Should return a valid handle */
707 ATF_REQUIRE(uvm_physseg_valid_p(upm));
708
709 ATF_REQUIRE_EQ(npages1 + npages2, uvmexp.npages);
710
711 /* After the second call two segments should exist */
712 ATF_CHECK_EQ(2, uvm_physseg_get_entries());
713 }
714
715 ATF_TC(uvm_physseg_handle_immutable);
716 ATF_TC_HEAD(uvm_physseg_handle_immutable, tc)
717 {
718 atf_tc_set_md_var(tc, "descr", "Tests if the uvm_physseg_t handle is \
719 immutable.");
720 }
721 ATF_TC_BODY(uvm_physseg_handle_immutable, tc)
722 {
723 uvm_physseg_t upm;
724
725 /* We insert the segments in out of order */
726
727 setup();
728
729 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
730 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
731
732 ATF_REQUIRE_EQ(0, uvmexp.npages);
733
734 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
735
736 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID_EMPTY, uvm_physseg_get_prev(upm));
737
738 /* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
739 #if VM_PHYSSEG_MAX > 1
740 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
741 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
742
743 ATF_REQUIRE_EQ(0, uvmexp.npages);
744
745 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
746
747 /* Fetch Previous, we inserted a lower value */
748 upm = uvm_physseg_get_prev(upm);
749
750 #if !defined(UVM_HOTPLUG)
751 /*
752 * This test is going to fail for the Array Implementation but is
753 * expected to pass in the RB Tree implementation.
754 */
755 /* Failure can be expected iff there are more than one handles */
756 atf_tc_expect_fail("Mutable handle in static array impl.");
757 #endif
758 ATF_CHECK(UVM_PHYSSEG_TYPE_INVALID_EMPTY != upm);
759 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
760 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
761 #endif
762 }
763
764 ATF_TC(uvm_physseg_seg_chomp_slab);
765 ATF_TC_HEAD(uvm_physseg_seg_chomp_slab, tc)
766 {
767 atf_tc_set_md_var(tc, "descr", "Tests the slab import code, uvm_physseg_seg_chomp_slab().");
768
769 }
770 ATF_TC_BODY(uvm_physseg_seg_chomp_slab, tc)
771 {
772 int err;
773 size_t i;
774 struct uvm_physseg *seg;
775 struct vm_page *slab, *pgs;
776 const size_t npages = UVM_PHYSSEG_BOOT_UNPLUG_MAX; /* Number of pages */
777
778 setup();
779
780 /* This is boot time */
781 slab = malloc(sizeof(struct vm_page) * npages * 2);
782
783 seg = uvm_physseg_alloc(sizeof(struct uvm_physseg));
784
785 uvm_physseg_seg_chomp_slab(PHYSSEG_NODE_TO_HANDLE(seg), slab, npages * 2);
786
787 /* Should be able to allocate two 128 * sizeof(*slab) */
788 ATF_REQUIRE_EQ(0, extent_alloc(seg->ext, sizeof(*slab), 1, 0, EX_BOUNDZERO, (void *)&pgs));
789 err = extent_free(seg->ext, (u_long) pgs, sizeof(*slab), EX_BOUNDZERO);
790
791 #if VM_PHYSSEG_MAX == 1
792 /*
793 * free() needs an extra region descriptor, but we only have
794 * one! The classic alloc() at free() problem
795 */
796
797 ATF_REQUIRE_EQ(ENOMEM, err);
798 #else
799 /* Try alloc/free at static time */
800 for (i = 0; i < npages; i++) {
801 ATF_REQUIRE_EQ(0, extent_alloc(seg->ext, sizeof(*slab), 1, 0, EX_BOUNDZERO, (void *)&pgs));
802 err = extent_free(seg->ext, (u_long) pgs, sizeof(*slab), EX_BOUNDZERO);
803 ATF_REQUIRE_EQ(0, err);
804 }
805 #endif
806
807 /* Now setup post boot */
808 uvm.page_init_done = true;
809
810 uvm_physseg_seg_chomp_slab(PHYSSEG_NODE_TO_HANDLE(seg), slab, npages * 2);
811
812 /* Try alloc/free after uvm_page.c:uvm_page_init() as well */
813 for (i = 0; i < npages; i++) {
814 ATF_REQUIRE_EQ(0, extent_alloc(seg->ext, sizeof(*slab), 1, 0, EX_BOUNDZERO, (void *)&pgs));
815 err = extent_free(seg->ext, (u_long) pgs, sizeof(*slab), EX_BOUNDZERO);
816 ATF_REQUIRE_EQ(0, err);
817 }
818
819 }
820
821 ATF_TC(uvm_physseg_alloc_from_slab);
822 ATF_TC_HEAD(uvm_physseg_alloc_from_slab, tc)
823 {
824 atf_tc_set_md_var(tc, "descr", "Tests the slab allocation code, uvm_physseg_seg_alloc_from_slab().");
825
826 }
827 ATF_TC_BODY(uvm_physseg_alloc_from_slab, tc)
828 {
829 struct uvm_physseg *seg;
830 struct vm_page *slab, *pgs;
831 const size_t npages = UVM_PHYSSEG_BOOT_UNPLUG_MAX; /* Number of pages */
832
833 setup();
834
835 /* This is boot time */
836 slab = malloc(sizeof(struct vm_page) * npages * 2);
837
838 seg = uvm_physseg_alloc(sizeof(struct uvm_physseg));
839
840 uvm_physseg_seg_chomp_slab(PHYSSEG_NODE_TO_HANDLE(seg), slab, npages * 2);
841
842 pgs = uvm_physseg_seg_alloc_from_slab(PHYSSEG_NODE_TO_HANDLE(seg), npages);
843
844 ATF_REQUIRE(pgs != NULL);
845
846 /* Now setup post boot */
847 uvm.page_init_done = true;
848
849 #if VM_PHYSSEG_MAX > 1
850 pgs = uvm_physseg_seg_alloc_from_slab(PHYSSEG_NODE_TO_HANDLE(seg), npages);
851 ATF_REQUIRE(pgs != NULL);
852 #endif
853 atf_tc_expect_fail("alloc beyond extent");
854
855 pgs = uvm_physseg_seg_alloc_from_slab(PHYSSEG_NODE_TO_HANDLE(seg), npages);
856 ATF_REQUIRE(pgs != NULL);
857 }
858
859 ATF_TC(uvm_physseg_init_seg);
860 ATF_TC_HEAD(uvm_physseg_init_seg, tc)
861 {
862 atf_tc_set_md_var(tc, "descr", "Tests if uvm_physseg_init_seg() adds pages to"
863 " uvmexp.npages");
864 }
865 ATF_TC_BODY(uvm_physseg_init_seg, tc)
866 {
867 struct uvm_physseg *seg;
868 struct vm_page *slab, *pgs;
869 const size_t npages = UVM_PHYSSEG_BOOT_UNPLUG_MAX; /* Number of pages */
870
871 setup();
872
873 /* This is boot time */
874 slab = malloc(sizeof(struct vm_page) * npages * 2);
875
876 seg = uvm_physseg_alloc(sizeof(struct uvm_physseg));
877
878 uvm_physseg_seg_chomp_slab(PHYSSEG_NODE_TO_HANDLE(seg), slab, npages * 2);
879
880 pgs = uvm_physseg_seg_alloc_from_slab(PHYSSEG_NODE_TO_HANDLE(seg), npages);
881
882 ATF_REQUIRE_EQ(0, uvmexp.npages);
883
884 seg->start = 0;
885 seg->end = npages;
886
887 seg->avail_start = 0;
888 seg->avail_end = npages;
889
890 uvm_physseg_init_seg(PHYSSEG_NODE_TO_HANDLE(seg), pgs);
891
892 ATF_REQUIRE_EQ(npages, uvmexp.npages);
893 }
894
895 #if 0
896 ATF_TC(uvm_physseg_init_seg);
897 ATF_TC_HEAD(uvm_physseg_init_seg, tc)
898 {
899 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physload() \
900 call works without a panic() after Segment is inited.");
901 }
902 ATF_TC_BODY(uvm_physseg_init_seg, tc)
903 {
904 uvm_physseg_t upm;
905 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
906 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
907
908 setup();
909 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
910 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
911
912 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
913
914 ATF_CHECK_EQ(0, uvmexp.npages);
915
916 /*
917 * Boot time physplug needs explicit external init,
918 * Duplicate what uvm_page.c:uvm_page_init() does.
919 * Note: not everything uvm_page_init() does gets done here.
920 * Read the source.
921 */
922 /* suck in backing slab, initialise extent. */
923 uvm_physseg_seg_chomp_slab(upm, pgs, npages);
924
925 /*
926 * Actual pgs[] allocation, from extent.
927 */
928 uvm_physseg_alloc_from_slab(upm, npages);
929
930 /* Now we initialize the segment */
931 uvm_physseg_init_seg(upm, pgs);
932
933 /* Done with boot simulation */
934 extent_init();
935 uvm.page_init_done = true;
936
937 /* We have total memory of 1MB */
938 ATF_CHECK_EQ(PAGE_COUNT_1M, uvmexp.npages);
939
940 upm =uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
941 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
942 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
943
944 /* We added another 1MB so PAGE_COUNT_1M + PAGE_COUNT_1M */
945 ATF_CHECK_EQ(PAGE_COUNT_1M + PAGE_COUNT_1M, uvmexp.npages);
946
947 }
948 #endif
949
950 ATF_TC(uvm_physseg_get_start);
951 ATF_TC_HEAD(uvm_physseg_get_start, tc)
952 {
953 atf_tc_set_md_var(tc, "descr", "Tests if the start PFN is returned \
954 correctly from a segment created via uvm_page_physload().");
955 }
956 ATF_TC_BODY(uvm_physseg_get_start, tc)
957 {
958 uvm_physseg_t upm;
959
960 /* Fake early boot */
961 setup();
962
963 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
964 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
965
966 ATF_REQUIRE_EQ(0, uvmexp.npages);
967
968 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
969
970 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
971
972 /* This test will be triggered only if there are 2 or more segments. */
973 #if VM_PHYSSEG_MAX > 1
974 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
975 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
976
977 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
978
979 ATF_REQUIRE_EQ(0, uvmexp.npages);
980
981 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
982 #endif
983 }
984
985 ATF_TC(uvm_physseg_get_start_invalid);
986 ATF_TC_HEAD(uvm_physseg_get_start_invalid, tc)
987 {
988 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
989 correctly when uvm_physseg_get_start() is called with invalid \
990 parameter values.");
991 }
992 ATF_TC_BODY(uvm_physseg_get_start_invalid, tc)
993 {
994 /* Check for pgs == NULL */
995 setup();
996 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
997 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
998
999 /* Force other check conditions */
1000 uvm.page_init_done = true;
1001
1002 ATF_REQUIRE_EQ(0, uvmexp.npages);
1003
1004 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1005
1006 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1007
1008 /* Invalid uvm_physseg_t */
1009 ATF_CHECK_EQ((paddr_t) -1,
1010 uvm_physseg_get_start(UVM_PHYSSEG_TYPE_INVALID));
1011 }
1012
1013 ATF_TC(uvm_physseg_get_end);
1014 ATF_TC_HEAD(uvm_physseg_get_end, tc)
1015 {
1016 atf_tc_set_md_var(tc, "descr", "Tests if the end PFN is returned \
1017 correctly from a segment created via uvm_page_physload().");
1018 }
1019 ATF_TC_BODY(uvm_physseg_get_end, tc)
1020 {
1021 uvm_physseg_t upm;
1022
1023 setup();
1024 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1025 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1026
1027 ATF_REQUIRE_EQ(0, uvmexp.npages);
1028
1029 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1030
1031 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1032
1033 /* This test will be triggered only if there are 2 or more segments. */
1034 #if VM_PHYSSEG_MAX > 1
1035 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1036 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1037
1038 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1039
1040 ATF_REQUIRE_EQ(0, uvmexp.npages);
1041
1042 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1043 #endif
1044 }
1045
1046 ATF_TC(uvm_physseg_get_end_invalid);
1047 ATF_TC_HEAD(uvm_physseg_get_end_invalid, tc)
1048 {
1049 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1050 correctly when uvm_physseg_get_end() is called with invalid \
1051 parameter values.");
1052 }
1053 ATF_TC_BODY(uvm_physseg_get_end_invalid, tc)
1054 {
1055 /* Check for pgs == NULL */
1056 setup();
1057 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1058 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1059
1060 /* Force other check conditions */
1061 uvm.page_init_done = true;
1062
1063 ATF_REQUIRE_EQ(0, uvmexp.npages);
1064
1065 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1066
1067 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1068
1069 /* Invalid uvm_physseg_t */
1070 ATF_CHECK_EQ((paddr_t) -1,
1071 uvm_physseg_get_end(UVM_PHYSSEG_TYPE_INVALID));
1072 }
1073
1074 ATF_TC(uvm_physseg_get_avail_start);
1075 ATF_TC_HEAD(uvm_physseg_get_avail_start, tc)
1076 {
1077 atf_tc_set_md_var(tc, "descr", "Tests if the avail_start PFN is \
1078 returned correctly from a segment created via uvm_page_physload().");
1079 }
1080 ATF_TC_BODY(uvm_physseg_get_avail_start, tc)
1081 {
1082 uvm_physseg_t upm;
1083
1084 setup();
1085 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1086 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1087
1088 ATF_REQUIRE_EQ(0, uvmexp.npages);
1089
1090 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1091
1092 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_1, uvm_physseg_get_avail_start(upm));
1093
1094 /* This test will be triggered only if there are 2 or more segments. */
1095 #if VM_PHYSSEG_MAX > 1
1096 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1097 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1098
1099 ATF_REQUIRE_EQ(0, uvmexp.npages);
1100
1101 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1102
1103 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
1104 #endif
1105 }
1106
1107 ATF_TC(uvm_physseg_get_avail_start_invalid);
1108 ATF_TC_HEAD(uvm_physseg_get_avail_start_invalid, tc)
1109 {
1110 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1111 correctly when uvm_physseg_get_avail_start() is called with invalid\
1112 parameter values.");
1113 }
1114 ATF_TC_BODY(uvm_physseg_get_avail_start_invalid, tc)
1115 {
1116 /* Check for pgs == NULL */
1117 setup();
1118 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1119 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1120
1121 /* Force other check conditions */
1122 uvm.page_init_done = true;
1123
1124 ATF_REQUIRE_EQ(0, uvmexp.npages);
1125
1126 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1127
1128 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1129
1130 /* Invalid uvm_physseg_t */
1131 ATF_CHECK_EQ((paddr_t) -1,
1132 uvm_physseg_get_avail_start(UVM_PHYSSEG_TYPE_INVALID));
1133 }
1134
1135 ATF_TC(uvm_physseg_get_avail_end);
1136 ATF_TC_HEAD(uvm_physseg_get_avail_end, tc)
1137 {
1138 atf_tc_set_md_var(tc, "descr", "Tests if the avail_end PFN is \
1139 returned correctly from a segment created via uvm_page_physload().");
1140 }
1141 ATF_TC_BODY(uvm_physseg_get_avail_end, tc)
1142 {
1143 uvm_physseg_t upm;
1144
1145 setup();
1146 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1147 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1148
1149 ATF_REQUIRE_EQ(0, uvmexp.npages);
1150
1151 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1152
1153 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1, uvm_physseg_get_avail_end(upm));
1154
1155 /* This test will be triggered only if there are 2 or more segments. */
1156 #if VM_PHYSSEG_MAX > 1
1157 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1158 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1159
1160 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1161
1162 ATF_REQUIRE_EQ(0, uvmexp.npages);
1163
1164 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
1165 #endif
1166 }
1167
1168 ATF_TC(uvm_physseg_get_avail_end_invalid);
1169 ATF_TC_HEAD(uvm_physseg_get_avail_end_invalid, tc)
1170 {
1171 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1172 correctly when uvm_physseg_get_avail_end() is called with invalid\
1173 parameter values.");
1174 }
1175 ATF_TC_BODY(uvm_physseg_get_avail_end_invalid, tc)
1176 {
1177 /* Check for pgs == NULL */
1178 setup();
1179 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1180 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1181
1182 /* Force other check conditions */
1183 uvm.page_init_done = true;
1184
1185 ATF_REQUIRE_EQ(0, uvmexp.npages);
1186
1187 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1188
1189 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1190
1191 /* Invalid uvm_physseg_t */
1192 ATF_CHECK_EQ((paddr_t) -1,
1193 uvm_physseg_get_avail_end(UVM_PHYSSEG_TYPE_INVALID));
1194 }
1195
1196 ATF_TC(uvm_physseg_get_next);
1197 ATF_TC_HEAD(uvm_physseg_get_next, tc)
1198 {
1199 atf_tc_set_md_var(tc, "descr", "Tests the pointer values for next \
1200 segment using the uvm_physseg_get_next() call.");
1201 }
1202 ATF_TC_BODY(uvm_physseg_get_next, tc)
1203 {
1204 uvm_physseg_t upm;
1205 #if VM_PHYSSEG_MAX > 1
1206 uvm_physseg_t upm_next;
1207 #endif
1208
1209 /* We insert the segments in ascending order */
1210
1211 setup();
1212 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1213 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1214
1215 ATF_REQUIRE_EQ(0, uvmexp.npages);
1216
1217 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1218
1219 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID_OVERFLOW,
1220 uvm_physseg_get_next(upm));
1221
1222 /* This test will be triggered only if there are 2 or more segments. */
1223 #if VM_PHYSSEG_MAX > 1
1224 upm_next = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1225 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1226
1227 ATF_REQUIRE_EQ(0, uvmexp.npages);
1228
1229 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1230
1231 upm = uvm_physseg_get_next(upm); /* Fetch Next */
1232
1233 ATF_CHECK_EQ(upm_next, upm);
1234 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
1235 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1236 #endif
1237
1238 /* This test will be triggered only if there are 3 or more segments. */
1239 #if VM_PHYSSEG_MAX > 2
1240 upm_next = uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1241 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1242
1243 ATF_REQUIRE_EQ(0, uvmexp.npages);
1244
1245 ATF_REQUIRE_EQ(3, uvm_physseg_get_entries());
1246
1247 upm = uvm_physseg_get_next(upm); /* Fetch Next */
1248
1249 ATF_CHECK_EQ(upm_next, upm);
1250 ATF_CHECK_EQ(VALID_START_PFN_3, uvm_physseg_get_start(upm));
1251 ATF_CHECK_EQ(VALID_END_PFN_3, uvm_physseg_get_end(upm));
1252 #endif
1253 }
1254
1255 ATF_TC(uvm_physseg_get_next_invalid);
1256 ATF_TC_HEAD(uvm_physseg_get_next_invalid, tc)
1257 {
1258 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1259 correctly when uvm_physseg_get_next() is called with invalid \
1260 parameter values.");
1261 }
1262 ATF_TC_BODY(uvm_physseg_get_next_invalid, tc)
1263 {
1264 uvm_physseg_t upm = UVM_PHYSSEG_TYPE_INVALID;
1265
1266 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID, uvm_physseg_get_next(upm));
1267 }
1268
1269 ATF_TC(uvm_physseg_get_prev);
1270 ATF_TC_HEAD(uvm_physseg_get_prev, tc)
1271 {
1272 atf_tc_set_md_var(tc, "descr", "Tests the pointer values for previous \
1273 segment using the uvm_physseg_get_prev() call.");
1274 }
1275 ATF_TC_BODY(uvm_physseg_get_prev, tc)
1276 {
1277 #if VM_PHYSSEG_MAX > 1
1278 uvm_physseg_t upm;
1279 #endif
1280 uvm_physseg_t upm_prev;
1281
1282
1283 setup();
1284 upm_prev = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1285 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1286
1287 ATF_REQUIRE_EQ(0, uvmexp.npages);
1288
1289 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1290
1291 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID_EMPTY,
1292 uvm_physseg_get_prev(upm_prev));
1293
1294 /* This test will be triggered only if there are 2 or more segments. */
1295 #if VM_PHYSSEG_MAX > 1
1296 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1297 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1298
1299 ATF_REQUIRE_EQ(0, uvmexp.npages);
1300
1301 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1302
1303 /* Fetch Previous, we inserted a lower value */
1304 upm = uvm_physseg_get_prev(upm);
1305
1306 ATF_CHECK_EQ(upm_prev, upm);
1307 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
1308 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1309 #endif
1310
1311 /* This test will be triggered only if there are 3 or more segments. */
1312 #if VM_PHYSSEG_MAX > 2
1313 uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1314 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1315
1316 ATF_REQUIRE_EQ(0, uvmexp.npages);
1317
1318 ATF_REQUIRE_EQ(3, uvm_physseg_get_entries());
1319
1320 /*
1321 * This will return UVM_PHYSSEG_TYPE_INVALID_EMPTY since we are
1322 * already at the lowest node.
1323 */
1324 upm = uvm_physseg_get_prev(upm);
1325
1326 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID_EMPTY, upm);
1327 #endif
1328 }
1329
1330 ATF_TC(uvm_physseg_get_prev_invalid);
1331 ATF_TC_HEAD(uvm_physseg_get_prev_invalid, tc)
1332 {
1333 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1334 correctly when uvm_physseg_get_prev() is called with invalid \
1335 parameter values.");
1336 }
1337 ATF_TC_BODY(uvm_physseg_get_prev_invalid, tc)
1338 {
1339 uvm_physseg_t upm = UVM_PHYSSEG_TYPE_INVALID;
1340
1341 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID, uvm_physseg_get_prev(upm));
1342 }
1343
1344 ATF_TC(uvm_physseg_get_first);
1345 ATF_TC_HEAD(uvm_physseg_get_first, tc)
1346 {
1347 atf_tc_set_md_var(tc, "descr", "Tests the pointer values for first \
1348 segment (lowest node) using the uvm_physseg_get_first() call.");
1349 }
1350 ATF_TC_BODY(uvm_physseg_get_first, tc)
1351 {
1352 uvm_physseg_t upm = UVM_PHYSSEG_TYPE_INVALID_EMPTY;
1353 uvm_physseg_t upm_first;
1354
1355 /* Fake early boot */
1356 setup();
1357
1358 /* No nodes exist */
1359 ATF_CHECK_EQ(upm, uvm_physseg_get_first());
1360
1361 upm_first = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1362 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1363
1364 ATF_REQUIRE_EQ(0, uvmexp.npages);
1365
1366 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1367
1368 /* Pointer to first should be the least valued node */
1369 upm = uvm_physseg_get_first();
1370 ATF_CHECK_EQ(upm_first, upm);
1371 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
1372 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1373 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
1374 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
1375
1376 /* This test will be triggered only if there are 2 or more segments. */
1377 #if VM_PHYSSEG_MAX > 1
1378 /* Insert a node of lesser value */
1379 upm_first = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1380 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1381
1382 ATF_CHECK_EQ(0, uvmexp.npages);
1383
1384 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1385
1386 /* Pointer to first should be the least valued node */
1387 upm = uvm_physseg_get_first();
1388 ATF_CHECK_EQ(upm_first, upm);
1389 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
1390 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1391 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_1, uvm_physseg_get_avail_start(upm));
1392 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1, uvm_physseg_get_avail_end(upm));
1393 #endif
1394
1395 /* This test will be triggered only if there are 3 or more segments. */
1396 #if VM_PHYSSEG_MAX > 2
1397 /* Insert a node of higher value */
1398 upm_first = uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1399 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1400
1401 ATF_CHECK_EQ(0, uvmexp.npages);
1402
1403 ATF_REQUIRE_EQ(3, uvm_physseg_get_entries());
1404
1405 /* Pointer to first should be the least valued node */
1406 upm = uvm_physseg_get_first();
1407 ATF_CHECK(upm_first != upm);
1408 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
1409 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1410 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_1, uvm_physseg_get_avail_start(upm));
1411 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1, uvm_physseg_get_avail_end(upm));
1412 #endif
1413 }
1414
1415 ATF_TC(uvm_physseg_get_last);
1416 ATF_TC_HEAD(uvm_physseg_get_last, tc)
1417 {
1418 atf_tc_set_md_var(tc, "descr", "Tests the pointer values for last \
1419 segment using the uvm_physseg_get_last() call.");
1420 }
1421 ATF_TC_BODY(uvm_physseg_get_last, tc)
1422 {
1423 uvm_physseg_t upm = UVM_PHYSSEG_TYPE_INVALID_EMPTY;
1424 uvm_physseg_t upm_last;
1425
1426 setup();
1427
1428 /* No nodes exist */
1429 ATF_CHECK_EQ(upm, uvm_physseg_get_last());
1430
1431 upm_last = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1432 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1433
1434 ATF_REQUIRE_EQ(0, uvmexp.npages);
1435
1436 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1437
1438 /* Pointer to last should be the most valued node */
1439 upm = uvm_physseg_get_last();
1440 ATF_CHECK_EQ(upm_last, upm);
1441 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
1442 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1443 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_1, uvm_physseg_get_avail_start(upm));
1444 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1, uvm_physseg_get_avail_end(upm));
1445
1446 /* This test will be triggered only if there are 2 or more segments. */
1447 #if VM_PHYSSEG_MAX > 1
1448 /* Insert node of greater value */
1449 upm_last = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1450 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1451
1452 ATF_REQUIRE_EQ(0, uvmexp.npages);
1453
1454 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1455
1456 /* Pointer to last should be the most valued node */
1457 upm = uvm_physseg_get_last();
1458 ATF_CHECK_EQ(upm_last, upm);
1459 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
1460 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1461 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
1462 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
1463 #endif
1464
1465 /* This test will be triggered only if there are 3 or more segments. */
1466 #if VM_PHYSSEG_MAX > 2
1467 /* Insert node of greater value */
1468 upm_last = uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1469 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1470
1471 ATF_REQUIRE_EQ(0, uvmexp.npages);
1472
1473 ATF_REQUIRE_EQ(3, uvm_physseg_get_entries());
1474
1475 /* Pointer to last should be the most valued node */
1476 upm = uvm_physseg_get_last();
1477 ATF_CHECK_EQ(upm_last, upm);
1478 ATF_CHECK_EQ(VALID_START_PFN_3, uvm_physseg_get_start(upm));
1479 ATF_CHECK_EQ(VALID_END_PFN_3, uvm_physseg_get_end(upm));
1480 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_3, uvm_physseg_get_avail_start(upm));
1481 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_3, uvm_physseg_get_avail_end(upm));
1482 #endif
1483 }
1484
1485 ATF_TC(uvm_physseg_valid);
1486 ATF_TC_HEAD(uvm_physseg_valid, tc)
1487 {
1488 atf_tc_set_md_var(tc, "descr", "Tests the pointer value for current \
1489 segment is valid using the uvm_physseg_valid_p() call.");
1490 }
1491 ATF_TC_BODY(uvm_physseg_valid, tc)
1492 {
1493 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
1494
1495 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
1496
1497 uvm_physseg_t upm;
1498
1499 setup();
1500 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1501 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1502
1503 ATF_REQUIRE_EQ(0, uvmexp.npages);
1504
1505 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1506
1507 uvm_physseg_init_seg(upm, pgs);
1508
1509 ATF_REQUIRE_EQ(PAGE_COUNT_1M, uvmexp.npages);
1510
1511 ATF_CHECK_EQ(true, uvm_physseg_valid_p(upm));
1512 }
1513
1514 ATF_TC(uvm_physseg_valid_invalid);
1515 ATF_TC_HEAD(uvm_physseg_valid_invalid, tc)
1516 {
1517 atf_tc_set_md_var(tc, "descr", "Tests the pointer value for current \
1518 segment is invalid using the uvm_physseg_valid_p() call.");
1519 }
1520 ATF_TC_BODY(uvm_physseg_valid_invalid, tc)
1521 {
1522 uvm_physseg_t upm;
1523
1524 setup();
1525 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1526 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1527
1528 /* Force other check conditions */
1529 uvm.page_init_done = true;
1530
1531 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1532
1533 /* Invalid uvm_physseg_t */
1534 ATF_CHECK_EQ(false, uvm_physseg_valid_p(UVM_PHYSSEG_TYPE_INVALID));
1535
1536 /*
1537 * Without any pages initialized for segment, it is considered
1538 * invalid
1539 */
1540 ATF_CHECK_EQ(false, uvm_physseg_valid_p(upm));
1541 }
1542
1543 ATF_TC(uvm_physseg_get_highest);
1544 ATF_TC_HEAD(uvm_physseg_get_highest, tc)
1545 {
1546 atf_tc_set_md_var(tc, "descr", "Tests if the returned PFN matches \
1547 the highest PFN in use by the system.");
1548 }
1549 ATF_TC_BODY(uvm_physseg_get_highest, tc)
1550 {
1551 setup();
1552 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1553 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1554
1555 /* Only one segment so highest is the current */
1556 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1 - 1, uvm_physseg_get_highest_frame());
1557
1558 /* This test will be triggered only if there are 2 or more segments. */
1559 #if VM_PHYSSEG_MAX > 1
1560 uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1561 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1562
1563 /* PFN_3 > PFN_1 */
1564 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_3 - 1, uvm_physseg_get_highest_frame());
1565 #endif
1566
1567 /* This test will be triggered only if there are 3 or more segments. */
1568 #if VM_PHYSSEG_MAX > 2
1569 uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1570 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1571
1572 /* PFN_3 > PFN_2 */
1573 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_3 - 1, uvm_physseg_get_highest_frame());
1574 #endif
1575 }
1576
1577 ATF_TC(uvm_physseg_get_free_list);
1578 ATF_TC_HEAD(uvm_physseg_get_free_list, tc)
1579 {
1580 atf_tc_set_md_var(tc, "descr", "Tests if the free list a segment \
1581 was loaded with is returned correctly by the \
1582 uvm_physseg_get_free_list() call.");
1583 }
1584 ATF_TC_BODY(uvm_physseg_get_free_list, tc)
1585 {
1586 uvm_physseg_t upm;
1587
1588 /* Fake early boot */
1589 setup();
1590
1591 /* Insertions are made in ascending order */
1592 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1593 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1594
1595 ATF_CHECK_EQ(VM_FREELIST_DEFAULT, uvm_physseg_get_free_list(upm));
1596
1597 /* This test will be triggered only if there are 2 or more segments. */
1598 #if VM_PHYSSEG_MAX > 1
1599 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1600 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_FIRST16);
1601
1602 ATF_CHECK_EQ(VM_FREELIST_FIRST16, uvm_physseg_get_free_list(upm));
1603 #endif
1604
1605 /* This test will be triggered only if there are 3 or more segments. */
1606 #if VM_PHYSSEG_MAX > 2
1607 upm = uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1608 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_FIRST1G);
1609
1610 ATF_CHECK_EQ(VM_FREELIST_FIRST1G, uvm_physseg_get_free_list(upm));
1611 #endif
1612 }
1613
1614 ATF_TC(uvm_physseg_get_start_hint);
1615 ATF_TC_HEAD(uvm_physseg_get_start_hint, tc)
1616 {
1617 atf_tc_set_md_var(tc, "descr", "Tests if the start_hint value of a \
1618 newly loaded segment is returned correctly by the \
1619 uvm_physseg_get_start_hint() call.");
1620 }
1621 ATF_TC_BODY(uvm_physseg_get_start_hint, tc)
1622 {
1623 uvm_physseg_t upm;
1624
1625 setup();
1626 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1627 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1628
1629 /* Will be Zero since no specific value is set during init */
1630 ATF_CHECK_EQ(0, uvm_physseg_get_start_hint(upm));
1631 }
1632
1633 ATF_TC(uvm_physseg_set_start_hint);
1634 ATF_TC_HEAD(uvm_physseg_set_start_hint, tc)
1635 {
1636 atf_tc_set_md_var(tc, "descr", "Tests if the returned start_hint value \
1637 of a segment matches the one set by the \
1638 uvm_physseg_set_start_hint() call.");
1639 }
1640 ATF_TC_BODY(uvm_physseg_set_start_hint, tc)
1641 {
1642 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
1643
1644 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
1645
1646 uvm_physseg_t upm;
1647
1648 setup();
1649 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1650 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1651
1652 uvm_physseg_init_seg(upm, pgs);
1653
1654 ATF_CHECK_EQ(true, uvm_physseg_set_start_hint(upm, atop(128)));
1655
1656 /* Should now be atop(128), as set just above */
1657 ATF_CHECK_EQ(atop(128), uvm_physseg_get_start_hint(upm));
1658 }
1659
1660 ATF_TC(uvm_physseg_set_start_hint_invalid);
1661 ATF_TC_HEAD(uvm_physseg_set_start_hint_invalid, tc)
1662 {
1663 atf_tc_set_md_var(tc, "descr", "Tests if false is returned when \
1664 uvm_physseg_set_start_hint() is called on an invalid \
1665 segment.");
1666 }
1667 ATF_TC_BODY(uvm_physseg_set_start_hint_invalid, tc)
1668 {
1669 uvm_physseg_t upm;
1670
1671 setup();
1672 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1673 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1674
1675 /* Force other check conditions */
1676 uvm.page_init_done = true;
1677
1678 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1679
1680 ATF_CHECK_EQ(false, uvm_physseg_set_start_hint(upm, atop(128)));
1681
1682 /*
1683 * Will be Zero since no specific value is set after the init
1684 * due to failure
1685 */
1686 atf_tc_expect_signal(SIGABRT, "invalid uvm_physseg_t handle");
1687
1688 ATF_CHECK_EQ(0, uvm_physseg_get_start_hint(upm));
1689 }
1690
1691 ATF_TC(uvm_physseg_get_pg);
1692 ATF_TC_HEAD(uvm_physseg_get_pg, tc)
1693 {
1694 atf_tc_set_md_var(tc, "descr", "Tests if the returned vm_page struct \
1695 is correct when fetched by uvm_physseg_get_pg() call.");
1696 }
1697 ATF_TC_BODY(uvm_physseg_get_pg, tc)
1698 {
1699 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
1700
1701 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
1702
1703 struct vm_page *extracted_pg = NULL;
1704
1705 uvm_physseg_t upm;
1706
1707 setup();
1708 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1709 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1710
1711 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1712
1713 ATF_REQUIRE_EQ(0, uvmexp.npages);
1714
1715 /* Now we initialize the segment */
1716 uvm_physseg_init_seg(upm, pgs);
1717
1718 ATF_REQUIRE_EQ(PAGE_COUNT_1M, uvmexp.npages);
1719
1720 ATF_REQUIRE_EQ(NULL, extracted_pg);
1721
1722 /* Try fetching the 5th Page in the Segment */
1723 extracted_pg = uvm_physseg_get_pg(upm, 5);
1724
1725 /* phys_addr is n * PAGE_SIZE, where n is the page frame number; see the sketch after this test case */
1726 ATF_CHECK_EQ(5 * PAGE_SIZE, extracted_pg->phys_addr);
1727
1728 /* Try fetching the 113th Page in the Segment */
1729 extracted_pg = uvm_physseg_get_pg(upm, 113);
1730
1731 ATF_CHECK_EQ(113 * PAGE_SIZE, extracted_pg->phys_addr);
1732 }
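
/*
 * Minimal sketch of the relationship the two checks above rely on,
 * written for an arbitrary page index.  It assumes, as the test data
 * suggests, that the idx'th page of an initialized segment has
 * phys_addr == (start PFN + idx) * PAGE_SIZE; for segment 1, whose start
 * PFN is 0, this reduces to idx * PAGE_SIZE.  Not registered as a test
 * case.
 */
static inline bool
pg_phys_addr_matches(uvm_physseg_t upm, paddr_t idx)
{
	struct vm_page *pg = uvm_physseg_get_pg(upm, idx);

	return pg != NULL &&
	    pg->phys_addr == (uvm_physseg_get_start(upm) + idx) * PAGE_SIZE;
}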
1733
1734 #ifdef __HAVE_PMAP_PHYSSEG
1735 ATF_TC(uvm_physseg_get_pmseg);
1736 ATF_TC_HEAD(uvm_physseg_get_pmseg, tc)
1737 {
1738 atf_tc_set_md_var(tc, "descr", "Tests if the returned pmap_physseg \
1739 struct is correct when fetched by uvm_physseg_get_pmseg() call.");
1740 }
1741 ATF_TC_BODY(uvm_physseg_get_pmseg, tc)
1742 {
1743 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
1744
1745 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
1746
1747 struct pmap_physseg pmseg = { true };
1748
1749 struct pmap_physseg *extracted_pmseg = NULL;
1750
1751 uvm_physseg_t upm;
1752
1753 setup();
1754 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1755 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1756
1757 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1758
1759 ATF_REQUIRE_EQ(0, uvmexp.npages);
1760
1761 /* Now we initialize the segment */
1762 uvm_physseg_init_seg(upm, pgs);
1763
1764 ATF_REQUIRE_EQ(PAGE_COUNT_1M, uvmexp.npages);
1765
1766 ATF_REQUIRE_EQ(NULL, extracted_pmseg);
1767
1768 ATF_REQUIRE_EQ(true, pmseg.dummy_variable);
1769
1770 /* Extract the current pmseg */
1771 extracted_pmseg = uvm_physseg_get_pmseg(upm);
1772
1773 /*
1774 * We can only check if it is not NULL
1775 * We do not know the value it contains
1776 */
1777 ATF_CHECK(NULL != extracted_pmseg);
1778
1779 extracted_pmseg->dummy_variable = pmseg.dummy_variable;
1780
1781 /* Invert value to ensure test integrity */
1782 pmseg.dummy_variable = false;
1783
1784 ATF_REQUIRE_EQ(false, pmseg.dummy_variable);
1785
1786 extracted_pmseg = uvm_physseg_get_pmseg(upm);
1787
1788 ATF_CHECK(NULL != extracted_pmseg);
1789
1790 ATF_CHECK_EQ(true, extracted_pmseg->dummy_variable);
1791 }
1792 #endif
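
#ifdef __HAVE_PMAP_PHYSSEG
/*
 * Illustrative sketch only: shows that the pointer returned by
 * uvm_physseg_get_pmseg() refers to per-segment storage, so a value
 * written through it is visible on a later lookup of the same handle,
 * which is what the test above exercises.  The helper and its marker
 * parameter are ours and are not registered as a test case.
 */
static inline bool
pmseg_roundtrip(uvm_physseg_t upm, int marker)
{
	struct pmap_physseg *pmseg = uvm_physseg_get_pmseg(upm);

	if (pmseg == NULL)
		return false;

	pmseg->dummy_variable = marker;

	/* Re-fetch through the handle and confirm the write stuck. */
	return uvm_physseg_get_pmseg(upm)->dummy_variable == marker;
}
#endif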
1793
1794 ATF_TC(vm_physseg_find);
1795 ATF_TC_HEAD(vm_physseg_find, tc)
1796 {
1797 atf_tc_set_md_var(tc, "descr", "Tests if the returned segment number \
1798 is correct when a PFN is passed into the uvm_physseg_find() call. \
1799 In addition, the offset of the PFN from the start of the segment \
1800 is set when the offset parameter is non-NULL.");
1801 }
1802 ATF_TC_BODY(vm_physseg_find, tc)
1803 {
1804 psize_t offset = (psize_t) -1;
1805
1806 uvm_physseg_t upm_first, result;
1807 #if VM_PHYSSEG_MAX > 1
1808 uvm_physseg_t upm_second;
1809 #endif
1810
1811 setup();
1812
1813 upm_first = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1814 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1815
1816 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1817
1818 ATF_REQUIRE_EQ(0, uvmexp.npages);
1819
1820 /* This test will be triggered only if there are 2 or more segments. */
1821 #if VM_PHYSSEG_MAX > 1
1822 upm_second = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1823 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1824
1825 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1826
1827 ATF_REQUIRE_EQ(0, uvmexp.npages);
1828 #endif
1829
1830 /* Under ONE_MEGABYTE is segment upm_first */
1831 result = uvm_physseg_find(atop(ONE_MEGABYTE - 1024), NULL);
1832 ATF_CHECK_EQ(upm_first, result);
1833 ATF_CHECK_EQ(uvm_physseg_get_start(upm_first),
1834 uvm_physseg_get_start(result));
1835 ATF_CHECK_EQ(uvm_physseg_get_end(upm_first),
1836 uvm_physseg_get_end(result));
1837 ATF_CHECK_EQ(uvm_physseg_get_avail_start(upm_first),
1838 uvm_physseg_get_avail_start(result));
1839 ATF_CHECK_EQ(uvm_physseg_get_avail_end(upm_first),
1840 uvm_physseg_get_avail_end(result));
1841
1842 ATF_REQUIRE_EQ((psize_t) -1, offset);
1843
1844 /* This test will be triggered only if there are 2 or more segments. */
1845 #if VM_PHYSSEG_MAX > 1
1846 /* Over ONE_MEGABYTE is segment upm_second */
1847 result = uvm_physseg_find(atop(ONE_MEGABYTE + 8192), &offset);
1848 ATF_CHECK_EQ(upm_second, result);
1849 ATF_CHECK_EQ(uvm_physseg_get_start(upm_second),
1850 uvm_physseg_get_start(result));
1851 ATF_CHECK_EQ(uvm_physseg_get_end(upm_second),
1852 uvm_physseg_get_end(result));
1853 ATF_CHECK_EQ(uvm_physseg_get_avail_start(upm_second),
1854 uvm_physseg_get_avail_start(result));
1855 ATF_CHECK_EQ(uvm_physseg_get_avail_end(upm_second),
1856 uvm_physseg_get_avail_end(result));
1857
1858 /* Offset is calculated based on PAGE_SIZE */
1859 /* atop(ONE_MEGABYTE + (2 * PAGE_SIZE)) - VALID_START_PFN_2 = 2 */
1860 ATF_CHECK_EQ(2, offset);
1861 #else
1862 /* Under ONE_MEGABYTE is segment upm_first */
1863 result = uvm_physseg_find(atop(ONE_MEGABYTE - 12288), &offset);
1864 ATF_CHECK_EQ(upm_first, result);
1865 ATF_CHECK_EQ(uvm_physseg_get_start(upm_first),
1866 uvm_physseg_get_start(result));
1867 ATF_CHECK_EQ(uvm_physseg_get_end(upm_first),
1868 uvm_physseg_get_end(result));
1869 ATF_CHECK_EQ(uvm_physseg_get_avail_start(upm_first),
1870 uvm_physseg_get_avail_start(result));
1871 ATF_CHECK_EQ(uvm_physseg_get_avail_end(upm_first),
1872 uvm_physseg_get_avail_end(result));
1873
1874 /* Offset is calculated based on PAGE_SIZE */
1875 /* atop(ONE_MEGABYTE - (3 * PAGE_SIZE)) - VALID_START_PFN_1 = 253 */
1876 ATF_CHECK_EQ(253, offset);
1877 #endif
1878 }
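
/*
 * Sketch of the offset relationship worked out in the comments above
 * (illustrative only, not a registered test case): for a PFN that
 * uvm_physseg_find() resolves to a loaded segment, the reported offset
 * is assumed to be the PFN minus the segment's start PFN.
 */
static inline bool
find_offset_matches(paddr_t pfn)
{
	psize_t offset = (psize_t) -1;
	uvm_physseg_t upm = uvm_physseg_find(pfn, &offset);

	if (upm == UVM_PHYSSEG_TYPE_INVALID)
		return false;

	return offset == pfn - uvm_physseg_get_start(upm);
}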
1879
1880 ATF_TC(vm_physseg_find_invalid);
1881 ATF_TC_HEAD(vm_physseg_find_invalid, tc)
1882 {
1883 atf_tc_set_md_var(tc, "descr", "Tests if UVM_PHYSSEG_TYPE_INVALID \
1884 is returned when a non-existent PFN is passed into the \
1885 uvm_physseg_find() call.");
1886 }
1887 ATF_TC_BODY(vm_physseg_find_invalid, tc)
1888 {
1889 psize_t offset = (psize_t) -1;
1890
1891 setup();
1892 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1893 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1894
1895 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1896
1897 ATF_REQUIRE_EQ(0, uvmexp.npages);
1898
1899 /* No segment over 3 MB exists at the moment */
1900 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID,
1901 uvm_physseg_find(atop(ONE_MEGABYTE * 3), NULL));
1902
1903 ATF_REQUIRE_EQ((psize_t) -1, offset);
1904
1905 /* No segment over 3 MB exists at the moment */
1906 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID,
1907 uvm_physseg_find(atop(ONE_MEGABYTE * 3), &offset));
1908
1909 ATF_CHECK_EQ((psize_t) -1, offset);
1910 }
1911
1912 ATF_TC(uvm_page_physunload_start);
1913 ATF_TC_HEAD(uvm_page_physunload_start, tc)
1914 {
1915 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physunload()\
1916 call works without a panic(). Unloads from the start of the segment.");
1917 }
1918 ATF_TC_BODY(uvm_page_physunload_start, tc)
1919 {
1920 /*
1921 * Would uvmexp.npages reduce every time uvm_page_physunload() is called?
1922 */
1923 psize_t npages = (VALID_END_PFN_2 - VALID_START_PFN_2);
1924
1925 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
1926
1927 paddr_t p = 0;
1928
1929 uvm_physseg_t upm;
1930
1931 setup();
1932 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1933 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1934
1935 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1936
1937 ATF_REQUIRE_EQ(0, uvmexp.npages);
1938
1939 uvm_physseg_init_seg(upm, pgs);
1940
1941 ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));
1942
1943 /*
1944 * On the first call, uvm_page_physunload() removes the first PFN
1945 *
1946 * New avail start will be VALID_AVAIL_START_PFN_2 + 1
1947 */
1948 ATF_CHECK_EQ(VALID_START_PFN_2, atop(p));
1949
1950 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2 + 1,
1951 uvm_physseg_get_avail_start(upm));
1952
1953 ATF_CHECK_EQ(VALID_START_PFN_2 + 1, uvm_physseg_get_start(upm));
1954
1955 /* Rest of the stuff should remain the same */
1956 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1957 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
1958 }
1959
1960 ATF_TC(uvm_page_physunload_end);
1961 ATF_TC_HEAD(uvm_page_physunload_end, tc)
1962 {
1963 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physunload()\
1964 call works without a panic(). Unloads from the end of the segment.");
1965 }
1966 ATF_TC_BODY(uvm_page_physunload_end, tc)
1967 {
1968 /*
1969 * Would uvmexp.npages reduce every time uvm_page_physunload() is called?
1970 */
1971 paddr_t p = 0;
1972
1973 uvm_physseg_t upm;
1974
1975 setup();
1976 /* Note: start != avail_start to remove from end. */
1977 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1978 VALID_AVAIL_START_PFN_2 + 1, VALID_AVAIL_END_PFN_2,
1979 VM_FREELIST_DEFAULT);
1980
1981 p = 0;
1982
1983 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1984
1985 ATF_REQUIRE_EQ(0, uvmexp.npages);
1986
1987 ATF_REQUIRE(
1988 uvm_physseg_get_avail_start(upm) != uvm_physseg_get_start(upm));
1989
1990 ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));
1991
1992 /*
1993 * Remember if X is the upper limit the actual valid pointer is X - 1
1994 *
1995 * For example if 256 is the upper limit for 1MB memory, last valid
1996 * pointer is 256 - 1 = 255
1997 */
1998
1999 ATF_CHECK_EQ(VALID_END_PFN_2 - 1, atop(p));
2000
2001 /*
2002 * Since start != avail_start here, uvm_page_physunload() removes the last PFN
2003 *
2004 * New avail end will be VALID_AVAIL_END_PFN_2 - 1
2005 * New end will be VALID_END_PFN_2 - 1
2006 */
2007
2008 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2 - 1, uvm_physseg_get_avail_end(upm));
2009
2010 ATF_CHECK_EQ(VALID_END_PFN_2 - 1, uvm_physseg_get_end(upm));
2011
2012 /* Rest of the stuff should remain the same */
2013 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2 + 1,
2014 uvm_physseg_get_avail_start(upm));
2015 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
2016 }
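
/*
 * Assumption-labelled sketch, not registered as a test case: performs a
 * single uvm_page_physunload() and classifies whether the page came from
 * the start or the end of the available range, mirroring the two cases
 * exercised by the tests above.  The enum and helper name are ours.
 */
enum unload_edge { UNLOAD_NONE, UNLOAD_START, UNLOAD_END };

static inline enum unload_edge
classify_single_unload(uvm_physseg_t upm, int free_list)
{
	paddr_t p = 0;
	paddr_t old_avail_start = uvm_physseg_get_avail_start(upm);
	paddr_t old_avail_end = uvm_physseg_get_avail_end(upm);

	if (uvm_page_physunload(upm, free_list, &p) == false)
		return UNLOAD_NONE;

	if (atop(p) == old_avail_start)
		return UNLOAD_START;
	if (atop(p) == old_avail_end - 1)
		return UNLOAD_END;
	return UNLOAD_NONE;
}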
2017
2018 ATF_TC(uvm_page_physunload_none);
2019 ATF_TC_HEAD(uvm_page_physunload_none, tc)
2020 {
2021 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physunload()\
2022 call works without a panic(). Does not unload from the start or \
2023 the end because start != avail_start and end != avail_end, \
2024 respectively.");
2025 }
2026 ATF_TC_BODY(uvm_page_physunload_none, tc)
2027 {
2028 psize_t npages = (VALID_END_PFN_2 - VALID_START_PFN_2);
2029
2030 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
2031
2032 paddr_t p = 0;
2033
2034 uvm_physseg_t upm;
2035
2036 setup();
2037 /*
2038 * Note: start != avail_start and end != avail_end.
2039 *
2040 * This prevents any unload from occurring.
2041 */
2042 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
2043 VALID_AVAIL_START_PFN_2 + 1, VALID_AVAIL_END_PFN_2 - 1,
2044 VM_FREELIST_DEFAULT);
2045
2046 p = 0;
2047
2048 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
2049
2050 ATF_REQUIRE_EQ(0, uvmexp.npages);
2051
2052 ATF_REQUIRE(
2053 uvm_physseg_get_avail_start(upm) != uvm_physseg_get_start(upm));
2054
2055 uvm_physseg_init_seg(upm, pgs);
2056
2057 ATF_CHECK_EQ(false, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));
2058
2059 /* uvm_page_physunload() will not unload any memory here */
2060 ATF_CHECK_EQ(0, p);
2061
2062 /* Rest of the stuff should remain the same */
2063 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2 + 1,
2064 uvm_physseg_get_avail_start(upm));
2065 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2 - 1,
2066 uvm_physseg_get_avail_end(upm));
2067 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
2068 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
2069 }
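
/*
 * Minimal sketch of the precondition this test probes (illustrative
 * only): uvm_page_physunload() can only trim a page when the segment
 * boundary and the available boundary coincide on at least one side;
 * the free list passed in must also match, as the _invalid test further
 * below shows.  The helper name is ours.
 */
static inline bool
can_unload_without_force(uvm_physseg_t upm)
{
	return uvm_physseg_get_start(upm) == uvm_physseg_get_avail_start(upm) ||
	    uvm_physseg_get_end(upm) == uvm_physseg_get_avail_end(upm);
}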
2070
2071 ATF_TC(uvm_page_physunload_delete_start);
2072 ATF_TC_HEAD(uvm_page_physunload_delete_start, tc)
2073 {
2074 atf_tc_set_md_var(tc, "descr", "Tests if the uvm_page_physunload() \
2075 works in the scenario where the segment becomes small enough to be \
2076 deleted. NOTE: this one deletes from the start.");
2077 }
2078 ATF_TC_BODY(uvm_page_physunload_delete_start, tc)
2079 {
2080 /*
2081 * Would uvmexp.npages reduce every time uvm_page_physunload() is called?
2082 */
2083 paddr_t p = 0;
2084
2085 uvm_physseg_t upm;
2086
2087 setup();
2088
2089 /*
2090 * Setup the Nuke from Starting point
2091 */
2092
2093 upm = uvm_page_physload(VALID_END_PFN_1 - 1, VALID_END_PFN_1,
2094 VALID_AVAIL_END_PFN_1 - 1, VALID_AVAIL_END_PFN_1,
2095 VM_FREELIST_DEFAULT);
2096
2097 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
2098
2099 ATF_REQUIRE_EQ(0, uvmexp.npages);
2100
2101 /* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
2102 #if VM_PHYSSEG_MAX > 1
2103 uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
2104 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
2105
2106 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
2107 #endif
2108
2109 #if VM_PHYSSEG_MAX == 1
2110 atf_tc_expect_signal(SIGABRT,
2111 "cannot uvm_page_physunload() the last segment");
2112 #endif
2113
2114 ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));
2115
2116 ATF_CHECK_EQ(VALID_END_PFN_1 - 1, atop(p));
2117
2118 ATF_CHECK_EQ(1, uvm_physseg_get_entries());
2119
2120 /* The only node now is the one we inserted second. */
2121 upm = uvm_physseg_get_first();
2122
2123 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
2124 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
2125 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
2126 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
2127 }
2128
2129 ATF_TC(uvm_page_physunload_delete_end);
2130 ATF_TC_HEAD(uvm_page_physunload_delete_end, tc)
2131 {
2132 atf_tc_set_md_var(tc, "descr", "Tests if the uvm_page_physunload() \
2133 works in the scenario where the segment becomes small enough to be \
2134 deleted. NOTE: this one deletes from the end.");
2135 }
2136 ATF_TC_BODY(uvm_page_physunload_delete_end, tc)
2137 {
2138 /*
2139 * Would uvmexp.npages reduce every time uvm_page_physunload() is called?
2140 */
2141
2142 paddr_t p = 0;
2143
2144 uvm_physseg_t upm;
2145
2146 setup();
2147
2148 /*
2149 * Setup the Nuke from Ending point
2150 */
2151
2152 upm = uvm_page_physload(VALID_START_PFN_1, VALID_START_PFN_1 + 2,
2153 VALID_AVAIL_START_PFN_1 + 1, VALID_AVAIL_START_PFN_1 + 2,
2154 VM_FREELIST_DEFAULT);
2155
2156 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
2157
2158 ATF_REQUIRE_EQ(0, uvmexp.npages);
2159
2160 /* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
2161 #if VM_PHYSSEG_MAX > 1
2162 uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
2163 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
2164
2165 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
2166 #endif
2167
2168 #if VM_PHYSSEG_MAX == 1
2169 atf_tc_expect_signal(SIGABRT,
2170 "cannot uvm_page_physunload() the last segment");
2171 #endif
2172
2173 ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));
2174
2175 p = 0;
2176
2177 ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));
2178
2179 ATF_CHECK_EQ(VALID_START_PFN_1 + 2, atop(p));
2180
2181 ATF_CHECK_EQ(1, uvm_physseg_get_entries());
2182
2183 /* The only node now is the one we inserted second. */
2184 upm = uvm_physseg_get_first();
2185
2186 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
2187 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
2188 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
2189 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
2190 }
2191
2192 ATF_TC(uvm_page_physunload_invalid);
2193 ATF_TC_HEAD(uvm_page_physunload_invalid, tc)
2194 {
2195 atf_tc_set_md_var(tc, "descr", "Tests if the uvm_page_physunload() \
2196 fails when the free list does not match.");
2197 }
2198 ATF_TC_BODY(uvm_page_physunload_invalid, tc)
2199 {
2200 psize_t npages = (VALID_END_PFN_2 - VALID_START_PFN_2);
2201
2202 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
2203
2204 paddr_t p = 0;
2205
2206 uvm_physseg_t upm;
2207
2208 setup();
2209 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
2210 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
2211
2212 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
2213
2214 ATF_REQUIRE_EQ(0, uvmexp.npages);
2215
2216 uvm_physseg_init_seg(upm, pgs);
2217
2218 ATF_CHECK_EQ(false, uvm_page_physunload(upm, VM_FREELIST_FIRST4G, &p));
2219 }
2220
2221 ATF_TC(uvm_page_physunload_force);
2222 ATF_TC_HEAD(uvm_page_physunload_force, tc)
2223 {
2224 atf_tc_set_md_var(tc, "descr", "Tests if the basic \
2225 uvm_page_physunload_force() call, including deletion, works without a panic().");
2226 }
2227 ATF_TC_BODY(uvm_page_physunload_force, tc)
2228 {
2229 /*
2230 * Would uvmexp.npages reduce every time uvm_page_physunload() is called?
2231 */
2232 paddr_t p = 0;
2233
2234 uvm_physseg_t upm;
2235
2236 setup();
2237 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
2238 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
2239
2240 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
2241
2242 ATF_REQUIRE_EQ(0, uvmexp.npages);
2243
2244 /* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
2245 #if VM_PHYSSEG_MAX > 1
2246 /*
2247 * We do a couple of physloads here because physunloading every PFN
2248 * in a given range while only one segment is loaded in total
2249 * results in a panic()
2250 */
2251 uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
2252 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
2253
2254 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
2255 #endif
2256
2257 #if VM_PHYSSEG_MAX == 1
2258 atf_tc_expect_signal(SIGABRT,
2259 "cannot uvm_page_physunload() the last segment");
2260 #endif
2261
2262 ATF_REQUIRE_EQ(VALID_AVAIL_START_PFN_1,
2263 uvm_physseg_get_avail_start(upm));
2264
2265 for (paddr_t i = VALID_AVAIL_START_PFN_1;
2266 i < VALID_AVAIL_END_PFN_1; i++) {
2267 ATF_CHECK_EQ(true,
2268 uvm_page_physunload_force(upm, VM_FREELIST_DEFAULT, &p));
2269 ATF_CHECK_EQ(i, atop(p));
2270
2271 if (i + 1 < VALID_AVAIL_END_PFN_1)
2272 ATF_CHECK_EQ(i + 1, uvm_physseg_get_avail_start(upm));
2273 }
2274
2275 /*
2276 * Now we try to retrieve the segment, which has been removed
2277 * from the system through force unloading all the pages inside it.
2278 */
2279 upm = uvm_physseg_find(VALID_AVAIL_END_PFN_1 - 1, NULL);
2280
2281 /* It should no longer exist */
2282 ATF_CHECK_EQ(NULL, upm);
2283
2284 ATF_CHECK_EQ(1, uvm_physseg_get_entries());
2285 }
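
/*
 * Sketch of the drain loop used in the test above (not registered as a
 * test case): force-unloads exactly one page per available PFN.  It
 * assumes another segment remains loaded, since unloading the final
 * segment panics, and it must not touch the handle after the last page
 * goes because the segment is deleted at that point.
 */
static inline void
drain_segment(uvm_physseg_t upm, int free_list)
{
	paddr_t p = 0;
	psize_t n = uvm_physseg_get_avail_end(upm) -
	    uvm_physseg_get_avail_start(upm);

	while (n-- > 0)
		(void)uvm_page_physunload_force(upm, free_list, &p);
}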
2286
2287 ATF_TC(uvm_page_physunload_force_invalid);
2288 ATF_TC_HEAD(uvm_page_physunload_force_invalid, tc)
2289 {
2290 atf_tc_set_md_var(tc, "descr", "Tests the invalid conditions for \
2291 the uvm_page_physunload_force() call.");
2292 }
2293 ATF_TC_BODY(uvm_page_physunload_force_invalid, tc)
2294 {
2295 paddr_t p = 0;
2296
2297 uvm_physseg_t upm;
2298
2299 setup();
2300 upm = uvm_page_physload(VALID_START_PFN_2, VALID_START_PFN_2 + 1,
2301 VALID_START_PFN_2, VALID_START_PFN_2, VM_FREELIST_DEFAULT);
2302
2303 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
2304
2305 ATF_REQUIRE_EQ(0, uvmexp.npages);
2306
2307 ATF_CHECK_EQ(false,
2308 uvm_page_physunload_force(upm, VM_FREELIST_DEFAULT, &p));
2309
2310 ATF_CHECK_EQ(0, p);
2311 }
2312
2313 ATF_TP_ADD_TCS(tp)
2314 {
2315 #if defined(UVM_HOTPLUG)
2316 /* Internal */
2317 ATF_TP_ADD_TC(tp, uvm_physseg_alloc_atboot_mismatch);
2318 ATF_TP_ADD_TC(tp, uvm_physseg_alloc_atboot_overrun);
2319 ATF_TP_ADD_TC(tp, uvm_physseg_alloc_sanity);
2320 ATF_TP_ADD_TC(tp, uvm_physseg_free_atboot_mismatch);
2321 ATF_TP_ADD_TC(tp, uvm_physseg_free_sanity);
2322 #if VM_PHYSSEG_MAX > 1
2323 ATF_TP_ADD_TC(tp, uvm_physseg_atboot_free_leak);
2324 #endif
2325 #endif /* UVM_HOTPLUG */
2326
2327 ATF_TP_ADD_TC(tp, uvm_physseg_plug);
2328 ATF_TP_ADD_TC(tp, uvm_physseg_unplug);
2329
2330 /* Exported */
2331 ATF_TP_ADD_TC(tp, uvm_physseg_init);
2332 ATF_TP_ADD_TC(tp, uvm_page_physload_preload);
2333 ATF_TP_ADD_TC(tp, uvm_page_physload_postboot);
2334 ATF_TP_ADD_TC(tp, uvm_physseg_handle_immutable);
2335 ATF_TP_ADD_TC(tp, uvm_physseg_seg_chomp_slab);
2336 ATF_TP_ADD_TC(tp, uvm_physseg_alloc_from_slab);
2337 ATF_TP_ADD_TC(tp, uvm_physseg_init_seg);
2338 ATF_TP_ADD_TC(tp, uvm_physseg_get_start);
2339 ATF_TP_ADD_TC(tp, uvm_physseg_get_start_invalid);
2340 ATF_TP_ADD_TC(tp, uvm_physseg_get_end);
2341 ATF_TP_ADD_TC(tp, uvm_physseg_get_end_invalid);
2342 ATF_TP_ADD_TC(tp, uvm_physseg_get_avail_start);
2343 ATF_TP_ADD_TC(tp, uvm_physseg_get_avail_start_invalid);
2344 ATF_TP_ADD_TC(tp, uvm_physseg_get_avail_end);
2345 ATF_TP_ADD_TC(tp, uvm_physseg_get_avail_end_invalid);
2346 ATF_TP_ADD_TC(tp, uvm_physseg_get_next);
2347 ATF_TP_ADD_TC(tp, uvm_physseg_get_next_invalid);
2348 ATF_TP_ADD_TC(tp, uvm_physseg_get_prev);
2349 ATF_TP_ADD_TC(tp, uvm_physseg_get_prev_invalid);
2350 ATF_TP_ADD_TC(tp, uvm_physseg_get_first);
2351 ATF_TP_ADD_TC(tp, uvm_physseg_get_last);
2352 ATF_TP_ADD_TC(tp, uvm_physseg_valid);
2353 ATF_TP_ADD_TC(tp, uvm_physseg_valid_invalid);
2354 ATF_TP_ADD_TC(tp, uvm_physseg_get_highest);
2355 ATF_TP_ADD_TC(tp, uvm_physseg_get_free_list);
2356 ATF_TP_ADD_TC(tp, uvm_physseg_get_start_hint);
2357 ATF_TP_ADD_TC(tp, uvm_physseg_set_start_hint);
2358 ATF_TP_ADD_TC(tp, uvm_physseg_set_start_hint_invalid);
2359 ATF_TP_ADD_TC(tp, uvm_physseg_get_pg);
2360
2361 #ifdef __HAVE_PMAP_PHYSSEG
2362 ATF_TP_ADD_TC(tp, uvm_physseg_get_pmseg);
2363 #endif
2364 ATF_TP_ADD_TC(tp, vm_physseg_find);
2365 ATF_TP_ADD_TC(tp, vm_physseg_find_invalid);
2366
2367 ATF_TP_ADD_TC(tp, uvm_page_physunload_start);
2368 ATF_TP_ADD_TC(tp, uvm_page_physunload_end);
2369 ATF_TP_ADD_TC(tp, uvm_page_physunload_none);
2370 ATF_TP_ADD_TC(tp, uvm_page_physunload_delete_start);
2371 ATF_TP_ADD_TC(tp, uvm_page_physunload_delete_end);
2372 ATF_TP_ADD_TC(tp, uvm_page_physunload_invalid);
2373 ATF_TP_ADD_TC(tp, uvm_page_physunload_force);
2374 ATF_TP_ADD_TC(tp, uvm_page_physunload_force_invalid);
2375
2376 return atf_no_error();
2377 }
2378