1 /* $NetBSD: t_uvm_physseg.c,v 1.11 2022/07/26 19:49:32 andvar Exp $ */
2
3 /*-
4 * Copyright (c) 2015, 2016 The NetBSD Foundation, Inc.
5 * All rights reserved.
6 *
7 * This code is derived from software contributed to The NetBSD Foundation
8 * by Santhosh N. Raju <santhosh.raju@gmail.com> and
9 * by Cherry G. Mathew
10 *
11 * Redistribution and use in source and binary forms, with or without
12 * modification, are permitted provided that the following conditions
13 * are met:
14 * 1. Redistributions of source code must retain the above copyright
15 * notice, this list of conditions and the following disclaimer.
16 * 2. Redistributions in binary form must reproduce the above copyright
17 * notice, this list of conditions and the following disclaimer in the
18 * documentation and/or other materials provided with the distribution.
19 *
20 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
21 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
22 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
23 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
24 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
25 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
26 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
27 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
28 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
29 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
30 * POSSIBILITY OF SUCH DAMAGE.
31 */
32
33 #include <sys/cdefs.h>
34 __RCSID("$NetBSD: t_uvm_physseg.c,v 1.11 2022/07/26 19:49:32 andvar Exp $");
35
36 /*
37  * If this line is commented out, tests related to uvm_physseg_get_pmseg()
38  * won't run.
39 *
40 * Have a look at machine/uvm_physseg.h for more details.
41 */
42 #define __HAVE_PMAP_PHYSSEG
43
44 /*
45 * This is a dummy struct used for testing purposes
46 *
47 * In reality this struct would exist in the MD part of the code residing in
48  * machine/vmparam.h
49 */
50
51 #ifdef __HAVE_PMAP_PHYSSEG
52 struct pmap_physseg {
53 int dummy_variable; /* Dummy variable used for testing */
54 };
55 #endif
56
57 /* Testing API - assumes userland */
58 /* Provide Kernel API equivalents */
59 #include <assert.h>
60 #include <errno.h>
61 #include <stdbool.h>
62 #include <string.h> /* memset(3) et al. */
63 #include <stdio.h> /* printf(3) */
64 #include <stdlib.h> /* malloc(3) */
65 #include <stdarg.h>
66 #include <stddef.h>
67
68 #define PRIxPADDR "lx"
69 #define PRIxPSIZE "lx"
70 #define PRIuPSIZE "lu"
71 #define PRIxVADDR "lx"
72 #define PRIxVSIZE "lx"
73 #define PRIuVSIZE "lu"
74
75 #define UVM_HOTPLUG /* Enable hotplug with rbtree. */
76 #define PMAP_STEAL_MEMORY
77 #define DEBUG /* Enable debug functionality. */
78
79 typedef unsigned long vaddr_t;
80 typedef unsigned long paddr_t;
81 typedef unsigned long psize_t;
82 typedef unsigned long vsize_t;
83
84 #include <uvm/uvm_physseg.h>
85 #include <uvm/uvm_page.h>
86
87 #ifndef DIAGNOSTIC
88 #define KASSERTMSG(e, msg, ...) /* NOTHING */
89 #define KASSERT(e) /* NOTHING */
90 #else
91 #define KASSERT(a) assert(a)
92 #define KASSERTMSG(exp, ...) do { printf(__VA_ARGS__); assert((exp)); } while (/*CONSTCOND*/0)
93 #endif
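/*
 * When DIAGNOSTIC is defined at build time, KASSERT()/KASSERTMSG() map to
 * assert(3), so a failed kernel assertion is delivered as SIGABRT; several
 * tests below rely on this via atf_tc_expect_signal(SIGABRT, ...).
 */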
94
95 #define VM_PHYSSEG_STRAT VM_PSTRAT_BSEARCH
96
97 #define VM_NFREELIST 4
98 #define VM_FREELIST_DEFAULT 0
99 #define VM_FREELIST_FIRST16 3
100 #define VM_FREELIST_FIRST1G 2
101 #define VM_FREELIST_FIRST4G 1
102
103 /*
104 * Used in tests when Array implementation is tested
105 */
106 #if !defined(VM_PHYSSEG_MAX)
107 #define VM_PHYSSEG_MAX 1
108 #endif
109
110 #define PAGE_SHIFT 12
111 #define PAGE_SIZE (1 << PAGE_SHIFT)
112 #define PAGE_MASK (PAGE_SIZE - 1)
113 #define atop(x) (((paddr_t)(x)) >> PAGE_SHIFT)
114 #define ptoa(x) (((paddr_t)(x)) << PAGE_SHIFT)
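/*
 * Worked example: with PAGE_SHIFT == 12 (4 KB pages), atop(0x100000) == 0x100,
 * i.e. 1 MB corresponds to 256 page frames, and ptoa(0x100) == 0x100000.
 */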
115
116 #define mutex_enter(l)
117 #define mutex_exit(l)
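/* The harness is single-threaded userland code, so kernel locking is a no-op. */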
118
119 psize_t physmem;
120
121 struct uvmexp uvmexp; /* decl */
122
123 /*
124 * uvm structure borrowed from uvm.h
125 *
126 * Remember this is a dummy structure used within the ATF Tests and
127 * uses only necessary fields from the original uvm struct.
128 * See uvm/uvm.h for the full struct.
129 */
130
131 struct uvm {
132 /* vm_page related parameters */
133
134 bool page_init_done; /* TRUE if uvm_page_init() finished */
135 } uvm;
136
137 #include <sys/kmem.h>
138
139 void *
140 kmem_alloc(size_t size, km_flag_t flags)
141 {
142 return malloc(size);
143 }
144
145 void *
146 kmem_zalloc(size_t size, km_flag_t flags)
147 {
148 void *ptr;
149 ptr = malloc(size);
150
151 memset(ptr, 0, size);
152
153 return ptr;
154 }
155
156 void
157 kmem_free(void *mem, size_t size)
158 {
159 free(mem);
160 }
161
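/*
 * panic() is reduced to printing the message and tripping KASSERT(false), so
 * kernel panic paths show up as failed assertions during the test run.
 */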
162 static void
163 panic(const char *fmt, ...)
164 {
165 va_list ap;
166
167 va_start(ap, fmt);
168 vprintf(fmt, ap);
169 printf("\n");
170 va_end(ap);
171 KASSERT(false);
172
173 /*NOTREACHED*/
174 }
175
176 static void
177 uvm_pagefree(struct vm_page *pg)
178 {
179 return;
180 }
181
182 #if defined(UVM_HOTPLUG)
183 static void
184 uvmpdpol_reinit(void)
185 {
186 return;
187 }
188 #endif /* UVM_HOTPLUG */
189
190 /* end - Provide Kernel API equivalents */
191
192
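/*
 * The implementation file is included directly (rather than linked against)
 * so that the tests can reach static/internal symbols such as
 * uvm_physseg_graph, uvm_physseg_alloc() and uvm_physseg_free().
 */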
193 #include "uvm/uvm_physseg.c"
194
195 #include <atf-c.h>
196
197 #define SIXTYFOUR_KILO (64 * 1024)
198 #define ONETWENTYEIGHT_KILO (128 * 1024)
199 #define TWOFIFTYSIX_KILO (256 * 1024)
200 #define FIVEONETWO_KILO (512 * 1024)
201 #define ONE_MEGABYTE (1024 * 1024)
202 #define TWO_MEGABYTE (2 * 1024 * 1024)
203
204 /* Sample Page Frame Numbers */
205 #define VALID_START_PFN_1 atop(0)
206 #define VALID_END_PFN_1 atop(ONE_MEGABYTE)
207 #define VALID_AVAIL_START_PFN_1 atop(0)
208 #define VALID_AVAIL_END_PFN_1 atop(ONE_MEGABYTE)
209
210 #define VALID_START_PFN_2 atop(ONE_MEGABYTE + 1)
211 #define VALID_END_PFN_2 atop(ONE_MEGABYTE * 2)
212 #define VALID_AVAIL_START_PFN_2 atop(ONE_MEGABYTE + 1)
213 #define VALID_AVAIL_END_PFN_2 atop(ONE_MEGABYTE * 2)
214
215 #define VALID_START_PFN_3 atop((ONE_MEGABYTE * 2) + 1)
216 #define VALID_END_PFN_3 atop(ONE_MEGABYTE * 3)
217 #define VALID_AVAIL_START_PFN_3 atop((ONE_MEGABYTE * 2) + 1)
218 #define VALID_AVAIL_END_PFN_3 atop(ONE_MEGABYTE * 3)
219
220 #define VALID_START_PFN_4 atop((ONE_MEGABYTE * 3) + 1)
221 #define VALID_END_PFN_4 atop(ONE_MEGABYTE * 4)
222 #define VALID_AVAIL_START_PFN_4 atop((ONE_MEGABYTE * 3) + 1)
223 #define VALID_AVAIL_END_PFN_4 atop(ONE_MEGABYTE * 4)
224
225 /*
226 * Total number of pages (of 4K size each) should be 256 for 1MB of memory.
227 */
228 #define PAGE_COUNT_1M 256
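/* i.e. PAGE_COUNT_1M == ONE_MEGABYTE / PAGE_SIZE == (1024 * 1024) / 4096 == 256 */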
229
230 /*
231 * A debug function to print the content of upm.
232 */
233 static inline void
234 uvm_physseg_dump_seg(uvm_physseg_t upm)
235 {
236 #if defined(DEBUG)
237 printf("%s: seg->start == %ld\n", __func__,
238 uvm_physseg_get_start(upm));
239 printf("%s: seg->end == %ld\n", __func__,
240 uvm_physseg_get_end(upm));
241 printf("%s: seg->avail_start == %ld\n", __func__,
242 uvm_physseg_get_avail_start(upm));
243 printf("%s: seg->avail_end == %ld\n", __func__,
244 uvm_physseg_get_avail_end(upm));
245
246 printf("====\n\n");
247 #else
248 return;
249 #endif /* DEBUG */
250 }
251
252 /*
253 * Private accessor that gets the value of uvm_physseg_graph.nentries
254 */
255 static int
256 uvm_physseg_get_entries(void)
257 {
258 #if defined(UVM_HOTPLUG)
259 return uvm_physseg_graph.nentries;
260 #else
261 return vm_nphysmem;
262 #endif /* UVM_HOTPLUG */
263 }
264
265 #if !defined(UVM_HOTPLUG)
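/*
 * Static array fallback: hand out the next free slot of the global
 * vm_physmem[] array used by the non-hotplug implementation.
 */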
266 static void *
267 uvm_physseg_alloc(size_t sz)
268 {
269 return &vm_physmem[vm_nphysseg++];
270 }
271 #endif
272
273 /*
274 * This macro was added to convert uvmexp.npages from int to psize_t
275 */
276 #define INT_TO_PSIZE_T(X) ((psize_t)(X))
277
278 /*
279 * Test Fixture SetUp().
280 */
281 static void
282 setup(void)
283 {
284 /* Prerequisites for running certain calls in uvm_physseg */
285 uvmexp.pagesize = PAGE_SIZE;
286 uvmexp.npages = 0;
287 uvm.page_init_done = false;
288 uvm_physseg_init();
289 }
290
291
292 /* <---- Tests for Internal functions ----> */
293 #if defined(UVM_HOTPLUG)
294 ATF_TC(uvm_physseg_alloc_atboot_mismatch);
295 ATF_TC_HEAD(uvm_physseg_alloc_atboot_mismatch, tc)
296 {
297 atf_tc_set_md_var(tc, "descr", "boot time uvm_physseg_alloc() sanity"
298 "size mismatch alloc() test.");
299 }
300
301 ATF_TC_BODY(uvm_physseg_alloc_atboot_mismatch, tc)
302 {
303 uvm.page_init_done = false;
304
305 atf_tc_expect_signal(SIGABRT, "size mismatch alloc()");
306
307 uvm_physseg_alloc(sizeof(struct uvm_physseg) - 1);
308 }
309
310 ATF_TC(uvm_physseg_alloc_atboot_overrun);
311 ATF_TC_HEAD(uvm_physseg_alloc_atboot_overrun, tc)
312 {
313 atf_tc_set_md_var(tc, "descr", "boot time uvm_physseg_alloc() sanity"
314 "array overrun alloc() test.");
315 }
316
317 ATF_TC_BODY(uvm_physseg_alloc_atboot_overrun, tc)
318 {
319 uvm.page_init_done = false;
320
321 atf_tc_expect_signal(SIGABRT, "array overrun alloc()");
322
323 uvm_physseg_alloc((VM_PHYSSEG_MAX + 1) * sizeof(struct uvm_physseg));
324
325 }
326
327 ATF_TC(uvm_physseg_alloc_sanity);
328 ATF_TC_HEAD(uvm_physseg_alloc_sanity, tc)
329 {
330 atf_tc_set_md_var(tc, "descr", "further uvm_physseg_alloc() sanity checks");
331 }
332
333 ATF_TC_BODY(uvm_physseg_alloc_sanity, tc)
334 {
335
336 /* At boot time */
337 uvm.page_init_done = false;
338
339 /* Correct alloc */
340 ATF_REQUIRE(uvm_physseg_alloc(VM_PHYSSEG_MAX * sizeof(struct uvm_physseg)));
341
342 /* Retry static alloc()s as dynamic - we expect them to pass */
343 uvm.page_init_done = true;
344 ATF_REQUIRE(uvm_physseg_alloc(sizeof(struct uvm_physseg) - 1));
345 ATF_REQUIRE(uvm_physseg_alloc(2 * VM_PHYSSEG_MAX * sizeof(struct uvm_physseg)));
346 }
347
348 ATF_TC(uvm_physseg_free_atboot_mismatch);
349 ATF_TC_HEAD(uvm_physseg_free_atboot_mismatch, tc)
350 {
351 atf_tc_set_md_var(tc, "descr", "boot time uvm_physseg_free() sanity"
352 "size mismatch free() test.");
353 }
354
355 ATF_TC_BODY(uvm_physseg_free_atboot_mismatch, tc)
356 {
357 uvm.page_init_done = false;
358
359 atf_tc_expect_signal(SIGABRT, "size mismatch free()");
360
361 uvm_physseg_free(&uvm_physseg[0], sizeof(struct uvm_physseg) - 1);
362 }
363
364 ATF_TC(uvm_physseg_free_sanity);
365 ATF_TC_HEAD(uvm_physseg_free_sanity, tc)
366 {
367 atf_tc_set_md_var(tc, "descr", "further uvm_physseg_free() sanity checks");
368 }
369
370 ATF_TC_BODY(uvm_physseg_free_sanity, tc)
371 {
372
373 /* At boot time */
374 uvm.page_init_done = false;
375
376 struct uvm_physseg *seg;
377
378 #if VM_PHYSSEG_MAX > 1
379 /*
380 * Note: free()ing the entire array is considered to be an
381 * error. Thus VM_PHYSSEG_MAX - 1.
382 */
383
384 seg = uvm_physseg_alloc((VM_PHYSSEG_MAX - 1) * sizeof(*seg));
385 uvm_physseg_free(seg, (VM_PHYSSEG_MAX - 1) * sizeof(struct uvm_physseg));
386 #endif
387
388 /* Retry static alloc()s as dynamic - we expect them to pass */
389 uvm.page_init_done = true;
390
391 seg = uvm_physseg_alloc(sizeof(struct uvm_physseg) - 1);
392 uvm_physseg_free(seg, sizeof(struct uvm_physseg) - 1);
393
394 seg = uvm_physseg_alloc(2 * VM_PHYSSEG_MAX * sizeof(struct uvm_physseg));
395
396 uvm_physseg_free(seg, 2 * VM_PHYSSEG_MAX * sizeof(struct uvm_physseg));
397 }
398
399 #if VM_PHYSSEG_MAX > 1
400 ATF_TC(uvm_physseg_atboot_free_leak);
401 ATF_TC_HEAD(uvm_physseg_atboot_free_leak, tc)
402 {
403 atf_tc_set_md_var(tc, "descr",
404 "does free() leak at boot ?"
405 "This test needs VM_PHYSSEG_MAX > 1)");
406 }
407
408 ATF_TC_BODY(uvm_physseg_atboot_free_leak, tc)
409 {
410
411 /* At boot time */
412 uvm.page_init_done = false;
413
414 /* alloc to array size */
415 struct uvm_physseg *seg;
416 seg = uvm_physseg_alloc(VM_PHYSSEG_MAX * sizeof(*seg));
417
418 uvm_physseg_free(seg, sizeof(*seg));
419
420 atf_tc_expect_signal(SIGABRT, "array overrun on alloc() after leak");
421
422 ATF_REQUIRE(uvm_physseg_alloc(sizeof(struct uvm_physseg)));
423 }
424 #endif /* VM_PHYSSEG_MAX */
425 #endif /* UVM_HOTPLUG */
426
427 /*
428 * Note: This function replicates verbatim what happens in
429 * uvm_page.c:uvm_page_init().
430 *
431 * Please track any changes that happen there.
432 */
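/*
 * Once this has run, uvm.page_init_done is true: later uvm_physseg_alloc()/
 * uvm_physseg_free() calls take the dynamic (kmem) path, and a further
 * uvm_page_physload() is expected to panic (see uvm_page_physload_postboot).
 */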
433 static void
434 uvm_page_init_fake(struct vm_page *pagearray, psize_t pagecount)
435 {
436 uvm_physseg_t bank;
437 size_t n;
438
439 for (bank = uvm_physseg_get_first(),
440 uvm_physseg_seg_chomp_slab(bank, pagearray, pagecount);
441 uvm_physseg_valid_p(bank);
442 bank = uvm_physseg_get_next(bank)) {
443
444 n = uvm_physseg_get_end(bank) - uvm_physseg_get_start(bank);
445 uvm_physseg_seg_alloc_from_slab(bank, n);
446 uvm_physseg_init_seg(bank, pagearray);
447
448 /* set up page array pointers */
449 pagearray += n;
450 pagecount -= n;
451 }
452
453 uvm.page_init_done = true;
454 }
455
456 ATF_TC(uvm_physseg_plug);
457 ATF_TC_HEAD(uvm_physseg_plug, tc)
458 {
459 atf_tc_set_md_var(tc, "descr",
460 "Test plug functionality.");
461 }
462 /* Note: We only do the second boot time plug if VM_PHYSSEG_MAX > 2 */
463 ATF_TC_BODY(uvm_physseg_plug, tc)
464 {
465 int nentries = 0; /* Count of entries via plug done so far */
466 uvm_physseg_t upm1;
467 #if VM_PHYSSEG_MAX > 2
468 uvm_physseg_t upm2;
469 #endif
470
471 #if VM_PHYSSEG_MAX > 1
472 uvm_physseg_t upm3;
473 #endif
474 uvm_physseg_t upm4;
475 psize_t npages1 = (VALID_END_PFN_1 - VALID_START_PFN_1);
476 psize_t npages2 = (VALID_END_PFN_2 - VALID_START_PFN_2);
477 psize_t npages3 = (VALID_END_PFN_3 - VALID_START_PFN_3);
478 psize_t npages4 = (VALID_END_PFN_4 - VALID_START_PFN_4);
479 struct vm_page *pgs, *slab = malloc(sizeof(struct vm_page) * (npages1
480 #if VM_PHYSSEG_MAX > 2
481 + npages2
482 #endif
483 + npages3));
484
485 /* Fake early boot */
486
487 setup();
488
489 /* Vanilla plug x 2 */
490 ATF_REQUIRE_EQ(uvm_physseg_plug(VALID_START_PFN_1, npages1, &upm1), true);
491 ATF_REQUIRE_EQ(++nentries, uvm_physseg_get_entries());
492 ATF_REQUIRE_EQ(0, uvmexp.npages);
493
494 #if VM_PHYSSEG_MAX > 2
495 ATF_REQUIRE_EQ(uvm_physseg_plug(VALID_START_PFN_2, npages2, &upm2), true);
496 ATF_REQUIRE_EQ(++nentries, uvm_physseg_get_entries());
497 ATF_REQUIRE_EQ(0, uvmexp.npages);
498 #endif
499 /* Post boot: Fake all segments and pages accounted for. */
500 uvm_page_init_fake(slab, npages1 + npages2 + npages3);
501
502 ATF_CHECK_EQ(npages1
503 #if VM_PHYSSEG_MAX > 2
504 + npages2
505 #endif
506 , INT_TO_PSIZE_T(uvmexp.npages));
507 #if VM_PHYSSEG_MAX > 1
508 /* Scavenge plug - goes into the same slab */
509 ATF_REQUIRE_EQ(uvm_physseg_plug(VALID_START_PFN_3, npages3, &upm3), true);
510 ATF_REQUIRE_EQ(++nentries, uvm_physseg_get_entries());
511 ATF_REQUIRE_EQ(npages1
512 #if VM_PHYSSEG_MAX > 2
513 + npages2
514 #endif
515 + npages3, INT_TO_PSIZE_T(uvmexp.npages));
516
517 /* Scavenge plug should fit right in the slab */
518 pgs = uvm_physseg_get_pg(upm3, 0);
519 ATF_REQUIRE(pgs > slab && pgs < (slab + npages1 + npages2 + npages3));
520 #endif
521 /* Hot plug - goes into a brand new slab */
522 ATF_REQUIRE_EQ(uvm_physseg_plug(VALID_START_PFN_4, npages4, &upm4), true);
523 /* The hot plug slab should have nothing to do with the original slab */
524 pgs = uvm_physseg_get_pg(upm4, 0);
525 ATF_REQUIRE(pgs < slab || pgs >= (slab + npages1
526 #if VM_PHYSSEG_MAX > 2
527 + npages2
528 #endif
529 + npages3));
530
531 }
532 ATF_TC(uvm_physseg_unplug);
533 ATF_TC_HEAD(uvm_physseg_unplug, tc)
534 {
535 atf_tc_set_md_var(tc, "descr",
536 "Test unplug functionality.");
537 }
538 ATF_TC_BODY(uvm_physseg_unplug, tc)
539 {
540 paddr_t pa = 0;
541
542 psize_t npages1 = (VALID_END_PFN_1 - VALID_START_PFN_1);
543 psize_t npages2 = (VALID_END_PFN_2 - VALID_START_PFN_2);
544 psize_t npages3 = (VALID_END_PFN_3 - VALID_START_PFN_3);
545
546 struct vm_page *slab = malloc(sizeof(struct vm_page) * (npages1 + npages2 + npages3));
547
548 uvm_physseg_t upm;
549
550 /* Boot time */
551 setup();
552
553 /* We start with zero segments */
554 ATF_REQUIRE_EQ(true, uvm_physseg_plug(atop(0), atop(ONE_MEGABYTE), NULL));
555 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
556 /* Do we have an arbitrary offset in there? */
557 uvm_physseg_find(atop(TWOFIFTYSIX_KILO), &pa);
558 ATF_REQUIRE_EQ(pa, atop(TWOFIFTYSIX_KILO));
559 ATF_REQUIRE_EQ(0, uvmexp.npages); /* Boot time sanity */
560
561 #if VM_PHYSSEG_MAX == 1
562 /*
563 * This is the curious case at boot time, of having one
564 * extent(9) static entry per segment, which means that a
565 * fragmenting unplug will fail.
566 */
567 atf_tc_expect_signal(SIGABRT, "fragmenting unplug for single segment");
568
569 /*
570 * In order to test the fragmenting cases, please set
571 * VM_PHYSSEG_MAX > 1
572 */
573 #endif
574 /* Now let's unplug from the middle */
575 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(atop(TWOFIFTYSIX_KILO), atop(FIVEONETWO_KILO)));
576 /* verify that a gap exists at TWOFIFTYSIX_KILO */
577 pa = 0; /* reset */
578 uvm_physseg_find(atop(TWOFIFTYSIX_KILO), &pa);
579 ATF_REQUIRE_EQ(pa, 0);
580
581 /* Post boot: Fake all segments and pages accounted for. */
582 uvm_page_init_fake(slab, npages1 + npages2 + npages3);
583 /* Account for the unplug */
584 ATF_CHECK_EQ(atop(FIVEONETWO_KILO), uvmexp.npages);
585
586 /* Original entry should fragment into two */
587 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
588
589 upm = uvm_physseg_find(atop(TWOFIFTYSIX_KILO + FIVEONETWO_KILO), NULL);
590
591 ATF_REQUIRE(uvm_physseg_valid_p(upm));
592
593 /* Now unplug the tail fragment - should swallow the complete entry */
594 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(atop(TWOFIFTYSIX_KILO + FIVEONETWO_KILO), atop(TWOFIFTYSIX_KILO)));
595
596 /* The "swallow" above should have invalidated the handle */
597 ATF_REQUIRE_EQ(false, uvm_physseg_valid_p(upm));
598
599 /* Only the first one is left now */
600 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
601
602 /* Unplug from the back */
603 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(atop(ONETWENTYEIGHT_KILO), atop(ONETWENTYEIGHT_KILO)));
604 /* Shouldn't change the number of segments */
605 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
606
607 /* Unplug from the front */
608 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(0, atop(SIXTYFOUR_KILO)));
609 /* Shouldn't change the number of segments */
610 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
611
612 /* Unplugging the final fragment should fail */
613 atf_tc_expect_signal(SIGABRT, "Unplugging the last segment");
614 ATF_REQUIRE_EQ(true, uvm_physseg_unplug(atop(SIXTYFOUR_KILO), atop(SIXTYFOUR_KILO)));
615 }
616
617
618 /* <---- end Tests for Internal functions ----> */
619
620 /* Tests for functions exported via uvm_physseg.h */
621 ATF_TC(uvm_physseg_init);
622 ATF_TC_HEAD(uvm_physseg_init, tc)
623 {
624 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_init() call\
625 initializes the vm_physmem struct which holds the rb_tree.");
626 }
627 ATF_TC_BODY(uvm_physseg_init, tc)
628 {
629 uvm_physseg_init();
630
631 ATF_REQUIRE_EQ(0, uvm_physseg_get_entries());
632 }
633
634 ATF_TC(uvm_page_physload_preload);
635 ATF_TC_HEAD(uvm_page_physload_preload, tc)
636 {
637 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physload() \
638 call works without a panic() in a preload scenario.");
639 }
640 ATF_TC_BODY(uvm_page_physload_preload, tc)
641 {
642 uvm_physseg_t upm;
643
644 setup();
645
646 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
647 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
648
649 /* Should return a valid handle */
650 ATF_REQUIRE(uvm_physseg_valid_p(upm));
651
652 /* No pages should be allocated yet */
653 ATF_REQUIRE_EQ(0, uvmexp.npages);
654
655 /* After the first call one segment should exist */
656 ATF_CHECK_EQ(1, uvm_physseg_get_entries());
657
658 /* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
659 #if VM_PHYSSEG_MAX > 1
660 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
661 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
662
663 /* Should return a valid handle */
664 ATF_REQUIRE(uvm_physseg_valid_p(upm));
665
666 ATF_REQUIRE_EQ(0, uvmexp.npages);
667
668 /* After the second call two segments should exist */
669 ATF_CHECK_EQ(2, uvm_physseg_get_entries());
670 #endif
671 }
672
673 ATF_TC(uvm_page_physload_postboot);
674 ATF_TC_HEAD(uvm_page_physload_postboot, tc)
675 {
676 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physload() \
677 panic()s in a post boot scenario.");
678 }
679 ATF_TC_BODY(uvm_page_physload_postboot, tc)
680 {
681 uvm_physseg_t upm;
682
683 psize_t npages1 = (VALID_END_PFN_1 - VALID_START_PFN_1);
684 psize_t npages2 = (VALID_END_PFN_2 - VALID_START_PFN_2);
685
686 struct vm_page *slab = malloc(sizeof(struct vm_page) * (npages1 + npages2));
687
688 setup();
689
690 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
691 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
692
693 /* Should return a valid handle */
694 ATF_REQUIRE(uvm_physseg_valid_p(upm));
695
696 /* No pages should be allocated yet */
697 ATF_REQUIRE_EQ(0, uvmexp.npages);
698
699 /* After the first call one segment should exist */
700 ATF_CHECK_EQ(1, uvm_physseg_get_entries());
701
702 /* Post boot: Fake all segments and pages accounted for. */
703 uvm_page_init_fake(slab, npages1 + npages2);
704
705 atf_tc_expect_signal(SIGABRT,
706 "uvm_page_physload() called post boot");
707
708 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
709 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
710
711 /* Should return a valid handle */
712 ATF_REQUIRE(uvm_physseg_valid_p(upm));
713
714 ATF_REQUIRE_EQ(npages1 + npages2, INT_TO_PSIZE_T(uvmexp.npages));
715
716 /* After the second call two segments should exist */
717 ATF_CHECK_EQ(2, uvm_physseg_get_entries());
718 }
719
720 ATF_TC(uvm_physseg_handle_immutable);
721 ATF_TC_HEAD(uvm_physseg_handle_immutable, tc)
722 {
723 atf_tc_set_md_var(tc, "descr", "Tests if the uvm_physseg_t handle is \
724 immutable.");
725 }
726 ATF_TC_BODY(uvm_physseg_handle_immutable, tc)
727 {
728 uvm_physseg_t upm;
729
730 /* We insert the segments in out of order */
731
732 setup();
733
734 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
735 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
736
737 ATF_REQUIRE_EQ(0, uvmexp.npages);
738
739 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
740
741 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID_EMPTY, uvm_physseg_get_prev(upm));
742
743 /* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
744 #if VM_PHYSSEG_MAX > 1
745 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
746 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
747
748 ATF_REQUIRE_EQ(0, uvmexp.npages);
749
750 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
751
752 /* Fetch Previous, we inserted a lower value */
753 upm = uvm_physseg_get_prev(upm);
754
755 #if !defined(UVM_HOTPLUG)
756 /*
757 * This test is going to fail for the Array Implementation but is
758 * expected to pass in the RB Tree implementation.
759 */
760 /* Failure can be expected iff there are more than one handles */
761 atf_tc_expect_fail("Mutable handle in static array impl.");
762 #endif
763 ATF_CHECK(UVM_PHYSSEG_TYPE_INVALID_EMPTY != upm);
764 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
765 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
766 #endif
767 }
768
769 ATF_TC(uvm_physseg_seg_chomp_slab);
770 ATF_TC_HEAD(uvm_physseg_seg_chomp_slab, tc)
771 {
772 atf_tc_set_md_var(tc, "descr", "The slab import code.()");
773
774 }
775 ATF_TC_BODY(uvm_physseg_seg_chomp_slab, tc)
776 {
777 int err;
778 size_t i;
779 struct uvm_physseg *seg;
780 struct vm_page *slab, *pgs;
781 const size_t npages = UVM_PHYSSEG_BOOT_UNPLUG_MAX; /* Number of pages */
782
783 setup();
784
785 /* This is boot time */
786 slab = malloc(sizeof(struct vm_page) * npages * 2);
787
788 seg = uvm_physseg_alloc(sizeof(struct uvm_physseg));
789
790 uvm_physseg_seg_chomp_slab(PHYSSEG_NODE_TO_HANDLE(seg), slab, npages * 2);
791
792 /* Should be able to allocate two 128 * sizeof(*slab) */
793 ATF_REQUIRE_EQ(0, extent_alloc(seg->ext, sizeof(*slab), 1, 0, EX_BOUNDZERO, (void *)&pgs));
794 err = extent_free(seg->ext, (u_long) pgs, sizeof(*slab), EX_BOUNDZERO);
795
796 #if VM_PHYSSEG_MAX == 1
797 /*
798 * free() needs an extra region descriptor, but we only have
799 * one! The classic alloc() at free() problem
800 */
801
802 ATF_REQUIRE_EQ(ENOMEM, err);
803 #else
804 /* Try alloc/free at static time */
805 for (i = 0; i < npages; i++) {
806 ATF_REQUIRE_EQ(0, extent_alloc(seg->ext, sizeof(*slab), 1, 0, EX_BOUNDZERO, (void *)&pgs));
807 err = extent_free(seg->ext, (u_long) pgs, sizeof(*slab), EX_BOUNDZERO);
808 ATF_REQUIRE_EQ(0, err);
809 }
810 #endif
811
812 /* Now setup post boot */
813 uvm.page_init_done = true;
814
815 uvm_physseg_seg_chomp_slab(PHYSSEG_NODE_TO_HANDLE(seg), slab, npages * 2);
816
817 /* Try alloc/free after uvm_page.c:uvm_page_init() as well */
818 for (i = 0; i < npages; i++) {
819 ATF_REQUIRE_EQ(0, extent_alloc(seg->ext, sizeof(*slab), 1, 0, EX_BOUNDZERO, (void *)&pgs));
820 err = extent_free(seg->ext, (u_long) pgs, sizeof(*slab), EX_BOUNDZERO);
821 ATF_REQUIRE_EQ(0, err);
822 }
823
824 }
825
826 ATF_TC(uvm_physseg_alloc_from_slab);
827 ATF_TC_HEAD(uvm_physseg_alloc_from_slab, tc)
828 {
829 atf_tc_set_md_var(tc, "descr", "The slab alloc code.()");
830
831 }
832 ATF_TC_BODY(uvm_physseg_alloc_from_slab, tc)
833 {
834 struct uvm_physseg *seg;
835 struct vm_page *slab, *pgs;
836 const size_t npages = UVM_PHYSSEG_BOOT_UNPLUG_MAX; /* Number of pages */
837
838 setup();
839
840 /* This is boot time */
841 slab = malloc(sizeof(struct vm_page) * npages * 2);
842
843 seg = uvm_physseg_alloc(sizeof(struct uvm_physseg));
844
845 uvm_physseg_seg_chomp_slab(PHYSSEG_NODE_TO_HANDLE(seg), slab, npages * 2);
846
847 pgs = uvm_physseg_seg_alloc_from_slab(PHYSSEG_NODE_TO_HANDLE(seg), npages);
848
849 ATF_REQUIRE(pgs != NULL);
850
851 /* Now setup post boot */
852 uvm.page_init_done = true;
853
854 #if VM_PHYSSEG_MAX > 1
855 pgs = uvm_physseg_seg_alloc_from_slab(PHYSSEG_NODE_TO_HANDLE(seg), npages);
856 ATF_REQUIRE(pgs != NULL);
857 #endif
858 atf_tc_expect_fail("alloc beyond extent");
859
860 pgs = uvm_physseg_seg_alloc_from_slab(PHYSSEG_NODE_TO_HANDLE(seg), npages);
861 ATF_REQUIRE(pgs != NULL);
862 }
863
864 ATF_TC(uvm_physseg_init_seg);
865 ATF_TC_HEAD(uvm_physseg_init_seg, tc)
866 {
867 atf_tc_set_md_var(tc, "descr", "Tests if uvm_physseg_init_seg adds pages to"
868 "uvmexp.npages");
869 }
870 ATF_TC_BODY(uvm_physseg_init_seg, tc)
871 {
872 struct uvm_physseg *seg;
873 struct vm_page *slab, *pgs;
874 const size_t npages = UVM_PHYSSEG_BOOT_UNPLUG_MAX; /* Number of pages */
875
876 setup();
877
878 /* This is boot time */
879 slab = malloc(sizeof(struct vm_page) * npages * 2);
880
881 seg = uvm_physseg_alloc(sizeof(struct uvm_physseg));
882
883 uvm_physseg_seg_chomp_slab(PHYSSEG_NODE_TO_HANDLE(seg), slab, npages * 2);
884
885 pgs = uvm_physseg_seg_alloc_from_slab(PHYSSEG_NODE_TO_HANDLE(seg), npages);
886
887 ATF_REQUIRE_EQ(0, uvmexp.npages);
888
889 seg->start = 0;
890 seg->end = npages;
891
892 seg->avail_start = 0;
893 seg->avail_end = npages;
894
895 uvm_physseg_init_seg(PHYSSEG_NODE_TO_HANDLE(seg), pgs);
896
897 ATF_REQUIRE_EQ(npages, INT_TO_PSIZE_T(uvmexp.npages));
898 }
899
900 #if 0
901 ATF_TC(uvm_physseg_init_seg);
902 ATF_TC_HEAD(uvm_physseg_init_seg, tc)
903 {
904 atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physload() \
905 call works without a panic() after Segment is inited.");
906 }
907 ATF_TC_BODY(uvm_physseg_init_seg, tc)
908 {
909 uvm_physseg_t upm;
910 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
911 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
912
913 setup();
914 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
915 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
916
917 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
918
919 ATF_CHECK_EQ(0, uvmexp.npages);
920
921 /*
922 * Boot time physplug needs explicit external init,
923 * Duplicate what uvm_page.c:uvm_page_init() does.
924 * Note: not everything uvm_page_init() does gets done here.
925 * Read the source.
926 */
927 /* suck in backing slab, initialise extent. */
928 uvm_physseg_seg_chomp_slab(upm, pgs, npages);
929
930 /*
931 * Actual pgs[] allocation, from extent.
932 */
933 uvm_physseg_seg_alloc_from_slab(upm, npages);
934
935 /* Now we initialize the segment */
936 uvm_physseg_init_seg(upm, pgs);
937
938 /* Done with boot simulation */
939 extent_init();
940 uvm.page_init_done = true;
941
942 /* We have total memory of 1MB */
943 ATF_CHECK_EQ(PAGE_COUNT_1M, uvmexp.npages);
944
945 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
946 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
947 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
948
949 /* We added another 1MB so PAGE_COUNT_1M + PAGE_COUNT_1M */
950 ATF_CHECK_EQ(PAGE_COUNT_1M + PAGE_COUNT_1M, uvmexp.npages);
951
952 }
953 #endif
954
955 ATF_TC(uvm_physseg_get_start);
956 ATF_TC_HEAD(uvm_physseg_get_start, tc)
957 {
958 atf_tc_set_md_var(tc, "descr", "Tests if the start PFN is returned \
959 correctly from a segment created via uvm_page_physload().");
960 }
961 ATF_TC_BODY(uvm_physseg_get_start, tc)
962 {
963 uvm_physseg_t upm;
964
965 /* Fake early boot */
966 setup();
967
968 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
969 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
970
971 ATF_REQUIRE_EQ(0, uvmexp.npages);
972
973 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
974
975 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
976
977 /* This test will be triggered only if there are 2 or more segments. */
978 #if VM_PHYSSEG_MAX > 1
979 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
980 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
981
982 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
983
984 ATF_REQUIRE_EQ(0, uvmexp.npages);
985
986 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
987 #endif
988 }
989
990 ATF_TC(uvm_physseg_get_start_invalid);
991 ATF_TC_HEAD(uvm_physseg_get_start_invalid, tc)
992 {
993 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
994 correctly when uvm_physseg_get_start() is called with invalid \
995 parameter values.");
996 }
997 ATF_TC_BODY(uvm_physseg_get_start_invalid, tc)
998 {
999 /* Check for pgs == NULL */
1000 setup();
1001 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1002 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1003
1004 /* Force other check conditions */
1005 uvm.page_init_done = true;
1006
1007 ATF_REQUIRE_EQ(0, uvmexp.npages);
1008
1009 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1010
1011 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1012
1013 /* Invalid uvm_physseg_t */
1014 ATF_CHECK_EQ((paddr_t) -1,
1015 uvm_physseg_get_start(UVM_PHYSSEG_TYPE_INVALID));
1016 }
1017
1018 ATF_TC(uvm_physseg_get_end);
1019 ATF_TC_HEAD(uvm_physseg_get_end, tc)
1020 {
1021 atf_tc_set_md_var(tc, "descr", "Tests if the end PFN is returned \
1022 correctly from a segment created via uvm_page_physload().");
1023 }
1024 ATF_TC_BODY(uvm_physseg_get_end, tc)
1025 {
1026 uvm_physseg_t upm;
1027
1028 setup();
1029 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1030 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1031
1032 ATF_REQUIRE_EQ(0, uvmexp.npages);
1033
1034 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1035
1036 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1037
1038 /* This test will be triggered only if there are 2 or more segments. */
1039 #if VM_PHYSSEG_MAX > 1
1040 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1041 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1042
1043 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1044
1045 ATF_REQUIRE_EQ(0, uvmexp.npages);
1046
1047 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1048 #endif
1049 }
1050
1051 ATF_TC(uvm_physseg_get_end_invalid);
1052 ATF_TC_HEAD(uvm_physseg_get_end_invalid, tc)
1053 {
1054 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1055 correctly when uvm_physseg_get_end() is called with invalid \
1056 parameter values.");
1057 }
1058 ATF_TC_BODY(uvm_physseg_get_end_invalid, tc)
1059 {
1060 /* Check for pgs == NULL */
1061 setup();
1062 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1063 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1064
1065 /* Force other check conditions */
1066 uvm.page_init_done = true;
1067
1068 ATF_REQUIRE_EQ(0, uvmexp.npages);
1069
1070 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1071
1072 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1073
1074 /* Invalid uvm_physseg_t */
1075 ATF_CHECK_EQ((paddr_t) -1,
1076 uvm_physseg_get_end(UVM_PHYSSEG_TYPE_INVALID));
1077 }
1078
1079 ATF_TC(uvm_physseg_get_avail_start);
1080 ATF_TC_HEAD(uvm_physseg_get_avail_start, tc)
1081 {
1082 atf_tc_set_md_var(tc, "descr", "Tests if the avail_start PFN is \
1083 returned correctly from a segment created via uvm_page_physload().");
1084 }
1085 ATF_TC_BODY(uvm_physseg_get_avail_start, tc)
1086 {
1087 uvm_physseg_t upm;
1088
1089 setup();
1090 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1091 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1092
1093 ATF_REQUIRE_EQ(0, uvmexp.npages);
1094
1095 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1096
1097 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_1, uvm_physseg_get_avail_start(upm));
1098
1099 /* This test will be triggered only if there are 2 or more segments. */
1100 #if VM_PHYSSEG_MAX > 1
1101 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1102 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1103
1104 ATF_REQUIRE_EQ(0, uvmexp.npages);
1105
1106 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1107
1108 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
1109 #endif
1110 }
1111
1112 ATF_TC(uvm_physseg_get_avail_start_invalid);
1113 ATF_TC_HEAD(uvm_physseg_get_avail_start_invalid, tc)
1114 {
1115 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1116 correctly when uvm_physseg_get_avail_start() is called with invalid\
1117 parameter values.");
1118 }
1119 ATF_TC_BODY(uvm_physseg_get_avail_start_invalid, tc)
1120 {
1121 /* Check for pgs == NULL */
1122 setup();
1123 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1124 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1125
1126 /* Force other check conditions */
1127 uvm.page_init_done = true;
1128
1129 ATF_REQUIRE_EQ(0, uvmexp.npages);
1130
1131 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1132
1133 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1134
1135 /* Invalid uvm_physseg_t */
1136 ATF_CHECK_EQ((paddr_t) -1,
1137 uvm_physseg_get_avail_start(UVM_PHYSSEG_TYPE_INVALID));
1138 }
1139
1140 ATF_TC(uvm_physseg_get_avail_end);
1141 ATF_TC_HEAD(uvm_physseg_get_avail_end, tc)
1142 {
1143 atf_tc_set_md_var(tc, "descr", "Tests if the avail_end PFN is \
1144 returned correctly from a segment created via uvm_page_physload().");
1145 }
1146 ATF_TC_BODY(uvm_physseg_get_avail_end, tc)
1147 {
1148 uvm_physseg_t upm;
1149
1150 setup();
1151 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1152 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1153
1154 ATF_REQUIRE_EQ(0, uvmexp.npages);
1155
1156 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1157
1158 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1, uvm_physseg_get_avail_end(upm));
1159
1160 /* This test will be triggered only if there are 2 or more segments. */
1161 #if VM_PHYSSEG_MAX > 1
1162 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1163 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1164
1165 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1166
1167 ATF_REQUIRE_EQ(0, uvmexp.npages);
1168
1169 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
1170 #endif
1171 }
1172
1173 ATF_TC(uvm_physseg_get_avail_end_invalid);
1174 ATF_TC_HEAD(uvm_physseg_get_avail_end_invalid, tc)
1175 {
1176 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1177 correctly when uvm_physseg_get_avail_end() is called with invalid\
1178 parameter values.");
1179 }
1180 ATF_TC_BODY(uvm_physseg_get_avail_end_invalid, tc)
1181 {
1182 /* Check for pgs == NULL */
1183 setup();
1184 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1185 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1186
1187 /* Force other check conditions */
1188 uvm.page_init_done = true;
1189
1190 ATF_REQUIRE_EQ(0, uvmexp.npages);
1191
1192 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1193
1194 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1195
1196 /* Invalid uvm_physseg_t */
1197 ATF_CHECK_EQ((paddr_t) -1,
1198 uvm_physseg_get_avail_end(UVM_PHYSSEG_TYPE_INVALID));
1199 }
1200
1201 ATF_TC(uvm_physseg_get_next);
1202 ATF_TC_HEAD(uvm_physseg_get_next, tc)
1203 {
1204 atf_tc_set_md_var(tc, "descr", "Tests the pointer values for next \
1205 segment using the uvm_physseg_get_next() call.");
1206 }
1207 ATF_TC_BODY(uvm_physseg_get_next, tc)
1208 {
1209 uvm_physseg_t upm;
1210 #if VM_PHYSSEG_MAX > 1
1211 uvm_physseg_t upm_next;
1212 #endif
1213
1214 /* We insert the segments in ascending order */
1215
1216 setup();
1217 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1218 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1219
1220 ATF_REQUIRE_EQ(0, uvmexp.npages);
1221
1222 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1223
1224 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID_OVERFLOW,
1225 uvm_physseg_get_next(upm));
1226
1227 /* This test will be triggered only if there are 2 or more segments. */
1228 #if VM_PHYSSEG_MAX > 1
1229 upm_next = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1230 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1231
1232 ATF_REQUIRE_EQ(0, uvmexp.npages);
1233
1234 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1235
1236 upm = uvm_physseg_get_next(upm); /* Fetch Next */
1237
1238 ATF_CHECK_EQ(upm_next, upm);
1239 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
1240 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1241 #endif
1242
1243 /* This test will be triggered only if there are 3 or more segments. */
1244 #if VM_PHYSSEG_MAX > 2
1245 upm_next = uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1246 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1247
1248 ATF_REQUIRE_EQ(0, uvmexp.npages);
1249
1250 ATF_REQUIRE_EQ(3, uvm_physseg_get_entries());
1251
1252 upm = uvm_physseg_get_next(upm); /* Fetch Next */
1253
1254 ATF_CHECK_EQ(upm_next, upm);
1255 ATF_CHECK_EQ(VALID_START_PFN_3, uvm_physseg_get_start(upm));
1256 ATF_CHECK_EQ(VALID_END_PFN_3, uvm_physseg_get_end(upm));
1257 #endif
1258 }
1259
1260 ATF_TC(uvm_physseg_get_next_invalid);
1261 ATF_TC_HEAD(uvm_physseg_get_next_invalid, tc)
1262 {
1263 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1264 correctly when uvm_physseg_get_next() is called with invalid \
1265 parameter values.");
1266 }
1267 ATF_TC_BODY(uvm_physseg_get_next_invalid, tc)
1268 {
1269 uvm_physseg_t upm = UVM_PHYSSEG_TYPE_INVALID;
1270
1271 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID, uvm_physseg_get_next(upm));
1272 }
1273
1274 ATF_TC(uvm_physseg_get_prev);
1275 ATF_TC_HEAD(uvm_physseg_get_prev, tc)
1276 {
1277 atf_tc_set_md_var(tc, "descr", "Tests the pointer values for previous \
1278 segment using the uvm_physseg_get_prev() call.");
1279 }
1280 ATF_TC_BODY(uvm_physseg_get_prev, tc)
1281 {
1282 #if VM_PHYSSEG_MAX > 1
1283 uvm_physseg_t upm;
1284 #endif
1285 uvm_physseg_t upm_prev;
1286
1287
1288 setup();
1289 upm_prev = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1290 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1291
1292 ATF_REQUIRE_EQ(0, uvmexp.npages);
1293
1294 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1295
1296 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID_EMPTY,
1297 uvm_physseg_get_prev(upm_prev));
1298
1299 /* This test will be triggered only if there are 2 or more segments. */
1300 #if VM_PHYSSEG_MAX > 1
1301 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1302 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1303
1304 ATF_REQUIRE_EQ(0, uvmexp.npages);
1305
1306 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1307
1308 /* Fetch Previous, we inserted a lower value */
1309 upm = uvm_physseg_get_prev(upm);
1310
1311 ATF_CHECK_EQ(upm_prev, upm);
1312 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
1313 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1314 #endif
1315
1316 /* This test will be triggered only if there are 3 or more segments. */
1317 #if VM_PHYSSEG_MAX > 2
1318 uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1319 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1320
1321 ATF_REQUIRE_EQ(0, uvmexp.npages);
1322
1323 ATF_REQUIRE_EQ(3, uvm_physseg_get_entries());
1324
1325 /*
1326 * This will return UVM_PHYSSEG_TYPE_INVALID_EMPTY since we are at the
1327 * lowest segment
1328 */
1329 upm = uvm_physseg_get_prev(upm);
1330
1331 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID_EMPTY, upm);
1332 #endif
1333 }
1334
1335 ATF_TC(uvm_physseg_get_prev_invalid);
1336 ATF_TC_HEAD(uvm_physseg_get_prev_invalid, tc)
1337 {
1338 atf_tc_set_md_var(tc, "descr", "Tests the invalid / error conditions \
1339 correctly when uvm_physseg_get_prev() is called with invalid \
1340 parameter values.");
1341 }
1342 ATF_TC_BODY(uvm_physseg_get_prev_invalid, tc)
1343 {
1344 uvm_physseg_t upm = UVM_PHYSSEG_TYPE_INVALID;
1345
1346 ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID, uvm_physseg_get_prev(upm));
1347 }
1348
1349 ATF_TC(uvm_physseg_get_first);
1350 ATF_TC_HEAD(uvm_physseg_get_first, tc)
1351 {
1352 atf_tc_set_md_var(tc, "descr", "Tests the pointer values for first \
1353 segment (lowest node) using the uvm_physseg_get_first() call.");
1354 }
1355 ATF_TC_BODY(uvm_physseg_get_first, tc)
1356 {
1357 uvm_physseg_t upm = UVM_PHYSSEG_TYPE_INVALID_EMPTY;
1358 uvm_physseg_t upm_first;
1359
1360 /* Fake early boot */
1361 setup();
1362
1363 /* No nodes exist */
1364 ATF_CHECK_EQ(upm, uvm_physseg_get_first());
1365
1366 upm_first = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1367 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1368
1369 ATF_REQUIRE_EQ(0, uvmexp.npages);
1370
1371 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1372
1373 /* Pointer to first should be the least valued node */
1374 upm = uvm_physseg_get_first();
1375 ATF_CHECK_EQ(upm_first, upm);
1376 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
1377 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1378 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
1379 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
1380
1381 /* This test will be triggered only if there are 2 or more segments. */
1382 #if VM_PHYSSEG_MAX > 1
1383 /* Insert a node of lesser value */
1384 upm_first = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1385 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1386
1387 ATF_CHECK_EQ(0, uvmexp.npages);
1388
1389 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1390
1391 /* Pointer to first should be the least valued node */
1392 upm = uvm_physseg_get_first();
1393 ATF_CHECK_EQ(upm_first, upm);
1394 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
1395 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1396 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_1, uvm_physseg_get_avail_start(upm));
1397 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1, uvm_physseg_get_avail_end(upm));
1398 #endif
1399
1400 /* This test will be triggered only if there are 3 or more segments. */
1401 #if VM_PHYSSEG_MAX > 2
1402 /* Insert a node of higher value */
1403 upm_first = uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1404 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1405
1406 ATF_CHECK_EQ(0, uvmexp.npages);
1407
1408 ATF_REQUIRE_EQ(3, uvm_physseg_get_entries());
1409
1410 /* Pointer to first should be the least valued node */
1411 upm = uvm_physseg_get_first();
1412 ATF_CHECK(upm_first != upm);
1413 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
1414 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1415 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_1, uvm_physseg_get_avail_start(upm));
1416 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1, uvm_physseg_get_avail_end(upm));
1417 #endif
1418 }
1419
1420 ATF_TC(uvm_physseg_get_last);
1421 ATF_TC_HEAD(uvm_physseg_get_last, tc)
1422 {
1423 atf_tc_set_md_var(tc, "descr", "Tests the pointer values for last \
1424 segment using the uvm_physseg_get_last() call.");
1425 }
1426 ATF_TC_BODY(uvm_physseg_get_last, tc)
1427 {
1428 uvm_physseg_t upm = UVM_PHYSSEG_TYPE_INVALID_EMPTY;
1429 uvm_physseg_t upm_last;
1430
1431 setup();
1432
1433 /* No nodes exist */
1434 ATF_CHECK_EQ(upm, uvm_physseg_get_last());
1435
1436 upm_last = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1437 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1438
1439 ATF_REQUIRE_EQ(0, uvmexp.npages);
1440
1441 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1442
1443 /* Pointer to last should be the most valued node */
1444 upm = uvm_physseg_get_last();
1445 ATF_CHECK_EQ(upm_last, upm);
1446 ATF_CHECK_EQ(VALID_START_PFN_1, uvm_physseg_get_start(upm));
1447 ATF_CHECK_EQ(VALID_END_PFN_1, uvm_physseg_get_end(upm));
1448 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_1, uvm_physseg_get_avail_start(upm));
1449 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1, uvm_physseg_get_avail_end(upm));
1450
1451 /* This test will be triggered only if there are 2 or more segments. */
1452 #if VM_PHYSSEG_MAX > 1
1453 /* Insert node of greater value */
1454 upm_last = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1455 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1456
1457 ATF_REQUIRE_EQ(0, uvmexp.npages);
1458
1459 ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
1460
1461 /* Pointer to last should be the most valued node */
1462 upm = uvm_physseg_get_last();
1463 ATF_CHECK_EQ(upm_last, upm);
1464 ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
1465 ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
1466 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
1467 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
1468 #endif
1469
1470 /* This test will be triggered only if there are 3 or more segments. */
1471 #if VM_PHYSSEG_MAX > 2
1472 /* Insert node of greater value */
1473 upm_last = uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1474 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1475
1476 ATF_REQUIRE_EQ(0, uvmexp.npages);
1477
1478 ATF_REQUIRE_EQ(3, uvm_physseg_get_entries());
1479
1480 /* Pointer to last should be the most valued node */
1481 upm = uvm_physseg_get_last();
1482 ATF_CHECK_EQ(upm_last, upm);
1483 ATF_CHECK_EQ(VALID_START_PFN_3, uvm_physseg_get_start(upm));
1484 ATF_CHECK_EQ(VALID_END_PFN_3, uvm_physseg_get_end(upm));
1485 ATF_CHECK_EQ(VALID_AVAIL_START_PFN_3, uvm_physseg_get_avail_start(upm));
1486 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_3, uvm_physseg_get_avail_end(upm));
1487 #endif
1488 }
1489
1490 ATF_TC(uvm_physseg_valid);
1491 ATF_TC_HEAD(uvm_physseg_valid, tc)
1492 {
1493 atf_tc_set_md_var(tc, "descr", "Tests the pointer value for current \
1494 segment is valid using the uvm_physseg_valid_p() call.");
1495 }
1496 ATF_TC_BODY(uvm_physseg_valid, tc)
1497 {
1498 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
1499
1500 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
1501
1502 uvm_physseg_t upm;
1503
1504 setup();
1505 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1506 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1507
1508 ATF_REQUIRE_EQ(0, uvmexp.npages);
1509
1510 ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());
1511
1512 uvm_physseg_init_seg(upm, pgs);
1513
1514 ATF_REQUIRE_EQ(PAGE_COUNT_1M, uvmexp.npages);
1515
1516 ATF_CHECK_EQ(true, uvm_physseg_valid_p(upm));
1517 }
1518
1519 ATF_TC(uvm_physseg_valid_invalid);
1520 ATF_TC_HEAD(uvm_physseg_valid_invalid, tc)
1521 {
1522 atf_tc_set_md_var(tc, "descr", "Tests the pointer value for current \
1523 segment is invalid using the uvm_physseg_valid_p() call.");
1524 }
1525 ATF_TC_BODY(uvm_physseg_valid_invalid, tc)
1526 {
1527 uvm_physseg_t upm;
1528
1529 setup();
1530 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1531 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1532
1533 /* Force other check conditions */
1534 uvm.page_init_done = true;
1535
1536 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1537
1538 /* Invalid uvm_physseg_t */
1539 ATF_CHECK_EQ(false, uvm_physseg_valid_p(UVM_PHYSSEG_TYPE_INVALID));
1540
1541 /*
1542 * Without any pages initialized for the segment, it is considered
1543 * invalid
1544 */
1545 ATF_CHECK_EQ(false, uvm_physseg_valid_p(upm));
1546 }
1547
1548 ATF_TC(uvm_physseg_get_highest);
1549 ATF_TC_HEAD(uvm_physseg_get_highest, tc)
1550 {
1551 atf_tc_set_md_var(tc, "descr", "Tests if the returned PFN matches \
1552 the highest PFN in use by the system.");
1553 }
1554 ATF_TC_BODY(uvm_physseg_get_highest, tc)
1555 {
1556 setup();
1557 uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1558 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1559
1560 /* Only one segment so highest is the current */
1561 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_1 - 1, uvm_physseg_get_highest_frame());
1562
1563 /* This test will be triggered only if there are 2 or more segments. */
1564 #if VM_PHYSSEG_MAX > 1
1565 uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1566 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_DEFAULT);
1567
1568 /* PFN_3 > PFN_1 */
1569 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_3 - 1, uvm_physseg_get_highest_frame());
1570 #endif
1571
1572 /* This test will be triggered only if there are 3 or more segments. */
1573 #if VM_PHYSSEG_MAX > 2
1574 uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1575 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);
1576
1577 /* PFN_3 > PFN_2 */
1578 ATF_CHECK_EQ(VALID_AVAIL_END_PFN_3 - 1, uvm_physseg_get_highest_frame());
1579 #endif
1580 }
1581
1582 ATF_TC(uvm_physseg_get_free_list);
1583 ATF_TC_HEAD(uvm_physseg_get_free_list, tc)
1584 {
1585 atf_tc_set_md_var(tc, "descr", "Tests if the returned Free List type \
1586 of a segment matches the one returned from \
1587 uvm_physseg_get_free_list() call.");
1588 }
1589 ATF_TC_BODY(uvm_physseg_get_free_list, tc)
1590 {
1591 uvm_physseg_t upm;
1592
1593 /* Fake early boot */
1594 setup();
1595
1596 /* Insertions are made in ascending order */
1597 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1598 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1599
1600 ATF_CHECK_EQ(VM_FREELIST_DEFAULT, uvm_physseg_get_free_list(upm));
1601
1602 /* This test will be triggered only if there are 2 or more segments. */
1603 #if VM_PHYSSEG_MAX > 1
1604 upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
1605 VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_FIRST16);
1606
1607 ATF_CHECK_EQ(VM_FREELIST_FIRST16, uvm_physseg_get_free_list(upm));
1608 #endif
1609
1610 /* This test will be triggered only if there are 3 or more segments. */
1611 #if VM_PHYSSEG_MAX > 2
1612 upm = uvm_page_physload(VALID_START_PFN_3, VALID_END_PFN_3,
1613 VALID_AVAIL_START_PFN_3, VALID_AVAIL_END_PFN_3, VM_FREELIST_FIRST1G);
1614
1615 ATF_CHECK_EQ(VM_FREELIST_FIRST1G, uvm_physseg_get_free_list(upm));
1616 #endif
1617 }
1618
1619 ATF_TC(uvm_physseg_get_start_hint);
1620 ATF_TC_HEAD(uvm_physseg_get_start_hint, tc)
1621 {
1622 atf_tc_set_md_var(tc, "descr", "Tests if the returned start_hint value \
1623 of a segment matches the one returned from \
1624 uvm_physseg_get_start_hint() call.");
1625 }
1626 ATF_TC_BODY(uvm_physseg_get_start_hint, tc)
1627 {
1628 uvm_physseg_t upm;
1629
1630 setup();
1631 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1632 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1633
1634 /* Will be Zero since no specific value is set during init */
1635 ATF_CHECK_EQ(0, uvm_physseg_get_start_hint(upm));
1636 }
1637
1638 ATF_TC(uvm_physseg_set_start_hint);
1639 ATF_TC_HEAD(uvm_physseg_set_start_hint, tc)
1640 {
1641 atf_tc_set_md_var(tc, "descr", "Tests if the returned start_hint value \
1642 of a segment matches the one set by the \
1643 uvm_physseg_set_start_hint() call.");
1644 }
1645 ATF_TC_BODY(uvm_physseg_set_start_hint, tc)
1646 {
1647 psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);
1648
1649 struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);
1650
1651 uvm_physseg_t upm;
1652
1653 setup();
1654 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1655 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1656
1657 uvm_physseg_init_seg(upm, pgs);
1658
1659 ATF_CHECK_EQ(true, uvm_physseg_set_start_hint(upm, atop(128)));
1660
1661 /* Will be atop(128), the value set by the call above */
1662 ATF_CHECK_EQ(atop(128), uvm_physseg_get_start_hint(upm));
1663 }
1664
1665 ATF_TC(uvm_physseg_set_start_hint_invalid);
1666 ATF_TC_HEAD(uvm_physseg_set_start_hint_invalid, tc)
1667 {
1668 atf_tc_set_md_var(tc, "descr", "Tests if the returned value is false \
1669 when an invalid segment matches the one trying to set by the \
1670 uvm_physseg_set_start_hint() call.");
1671 }
1672 ATF_TC_BODY(uvm_physseg_set_start_hint_invalid, tc)
1673 {
1674 uvm_physseg_t upm;
1675
1676 setup();
1677 upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
1678 VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);
1679
1680 /* Force other check conditions */
1681 uvm.page_init_done = true;
1682
1683 ATF_REQUIRE_EQ(true, uvm.page_init_done);
1684
1685 ATF_CHECK_EQ(false, uvm_physseg_set_start_hint(upm, atop(128)));
1686
	/*
	 * Will remain zero, since the set call above failed after
	 * uvm.page_init_done was forced to true.
	 */
	atf_tc_expect_signal(SIGABRT, "invalid uvm_physseg_t handle");

	ATF_CHECK_EQ(0, uvm_physseg_get_start_hint(upm));
}

ATF_TC(uvm_physseg_get_pg);
ATF_TC_HEAD(uvm_physseg_get_pg, tc)
{
	atf_tc_set_md_var(tc, "descr", "Tests if the returned vm_page struct \
	    is correct when fetched by uvm_physseg_get_pg() call.");
}
ATF_TC_BODY(uvm_physseg_get_pg, tc)
{
	psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);

	struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);

	struct vm_page *extracted_pg = NULL;

	uvm_physseg_t upm;

	setup();
	upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
	    VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);

	/* Now we initialize the segment */
	uvm_physseg_init_seg(upm, pgs);

	ATF_REQUIRE_EQ(PAGE_COUNT_1M, uvmexp.npages);

	ATF_REQUIRE_EQ(NULL, extracted_pg);

	/* Try fetching the 5th Page in the Segment */
	extracted_pg = uvm_physseg_get_pg(upm, 5);

	/* The value of phys_addr is n * PAGE_SIZE, where n is the page number */
	ATF_CHECK_EQ(5 * PAGE_SIZE, extracted_pg->phys_addr);
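	/*
	 * Worked example (assuming the common 4 KiB page size): the 5th
	 * page sits at phys_addr 5 * 4096 = 0x5000, and the 113th page
	 * checked below at 113 * 4096 = 0x71000.
	 */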

	/* Try fetching the 113th Page in the Segment */
	extracted_pg = uvm_physseg_get_pg(upm, 113);

	ATF_CHECK_EQ(113 * PAGE_SIZE, extracted_pg->phys_addr);
}

#ifdef __HAVE_PMAP_PHYSSEG
ATF_TC(uvm_physseg_get_pmseg);
ATF_TC_HEAD(uvm_physseg_get_pmseg, tc)
{
	atf_tc_set_md_var(tc, "descr", "Tests if the returned pmap_physseg \
	    struct is correct when fetched by uvm_physseg_get_pmseg() call.");
}
ATF_TC_BODY(uvm_physseg_get_pmseg, tc)
{
	psize_t npages = (VALID_END_PFN_1 - VALID_START_PFN_1);

	struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);

	struct pmap_physseg pmseg = { true };

	struct pmap_physseg *extracted_pmseg = NULL;

	uvm_physseg_t upm;

	setup();
	upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
	    VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);

	/* Now we initialize the segment */
	uvm_physseg_init_seg(upm, pgs);

	ATF_REQUIRE_EQ(PAGE_COUNT_1M, uvmexp.npages);

	ATF_REQUIRE_EQ(NULL, extracted_pmseg);

	ATF_REQUIRE_EQ(true, pmseg.dummy_variable);

	/* Extract the current pmseg */
	extracted_pmseg = uvm_physseg_get_pmseg(upm);

	/*
	 * We can only check if it is not NULL
	 * We do not know the value it contains
	 */
	ATF_CHECK(NULL != extracted_pmseg);

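	/*
	 * Write a known value into the segment's pmap_physseg, then flip
	 * the local copy below; the stored value must survive, showing
	 * that uvm_physseg_get_pmseg() returns a pointer into the segment
	 * rather than a copy.
	 */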
	extracted_pmseg->dummy_variable = pmseg.dummy_variable;

	/* Invert value to ensure test integrity */
	pmseg.dummy_variable = false;

	ATF_REQUIRE_EQ(false, pmseg.dummy_variable);

	extracted_pmseg = uvm_physseg_get_pmseg(upm);

	ATF_CHECK(NULL != extracted_pmseg);

	ATF_CHECK_EQ(true, extracted_pmseg->dummy_variable);
}
#endif

ATF_TC(vm_physseg_find);
ATF_TC_HEAD(vm_physseg_find, tc)
{
	atf_tc_set_md_var(tc, "descr", "Tests if the returned segment number \
	    is correct when a PFN is passed into the uvm_physseg_find() call. \
	    In addition, the offset of the PFN from the start of the \
	    segment is also set if the parameter is passed in as non-NULL.");
}
ATF_TC_BODY(vm_physseg_find, tc)
{
	psize_t offset = (psize_t) -1;

	uvm_physseg_t upm_first, result;
#if VM_PHYSSEG_MAX > 1
	uvm_physseg_t upm_second;
#endif

	setup();

	upm_first = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
	    VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);

	/* This test will be triggered only if there are 2 or more segments. */
#if VM_PHYSSEG_MAX > 1
	upm_second = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
	    VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);
#endif

	/* Under ONE_MEGABYTE is segment upm_first */
	result = uvm_physseg_find(atop(ONE_MEGABYTE - 1024), NULL);
	ATF_CHECK_EQ(upm_first, result);
	ATF_CHECK_EQ(uvm_physseg_get_start(upm_first),
	    uvm_physseg_get_start(result));
	ATF_CHECK_EQ(uvm_physseg_get_end(upm_first),
	    uvm_physseg_get_end(result));
	ATF_CHECK_EQ(uvm_physseg_get_avail_start(upm_first),
	    uvm_physseg_get_avail_start(result));
	ATF_CHECK_EQ(uvm_physseg_get_avail_end(upm_first),
	    uvm_physseg_get_avail_end(result));

	ATF_REQUIRE_EQ((psize_t) -1, offset);

	/* This test will be triggered only if there are 2 or more segments. */
#if VM_PHYSSEG_MAX > 1
	/* Over ONE_MEGABYTE is segment upm_second */
	result = uvm_physseg_find(atop(ONE_MEGABYTE + 8192), &offset);
	ATF_CHECK_EQ(upm_second, result);
	ATF_CHECK_EQ(uvm_physseg_get_start(upm_second),
	    uvm_physseg_get_start(result));
	ATF_CHECK_EQ(uvm_physseg_get_end(upm_second),
	    uvm_physseg_get_end(result));
	ATF_CHECK_EQ(uvm_physseg_get_avail_start(upm_second),
	    uvm_physseg_get_avail_start(result));
	ATF_CHECK_EQ(uvm_physseg_get_avail_end(upm_second),
	    uvm_physseg_get_avail_end(result));

	/* Offset is calculated in units of PAGE_SIZE */
	/* atop(ONE_MEGABYTE + (2 * PAGE_SIZE)) - VALID_START_PFN_2 = 2 */
	ATF_CHECK_EQ(2, offset);
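	/*
	 * Worked example, assuming 4 KiB pages and that segment 2 starts
	 * at ONE_MEGABYTE: atop(ONE_MEGABYTE + 8192) = 258, the segment
	 * starts at PFN 256, so the offset into the segment is 2.
	 */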
#else
	/* Under ONE_MEGABYTE is segment upm_first */
	result = uvm_physseg_find(atop(ONE_MEGABYTE - 12288), &offset);
	ATF_CHECK_EQ(upm_first, result);
	ATF_CHECK_EQ(uvm_physseg_get_start(upm_first),
	    uvm_physseg_get_start(result));
	ATF_CHECK_EQ(uvm_physseg_get_end(upm_first),
	    uvm_physseg_get_end(result));
	ATF_CHECK_EQ(uvm_physseg_get_avail_start(upm_first),
	    uvm_physseg_get_avail_start(result));
	ATF_CHECK_EQ(uvm_physseg_get_avail_end(upm_first),
	    uvm_physseg_get_avail_end(result));

	/* Offset is calculated in units of PAGE_SIZE */
	/* atop(ONE_MEGABYTE - (3 * PAGE_SIZE)) - VALID_START_PFN_1 = 253 */
	ATF_CHECK_EQ(253, offset);
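	/*
	 * Worked example, assuming 4 KiB pages and that segment 1 starts
	 * at PFN 0: atop(ONE_MEGABYTE - 12288) = 256 - 3 = 253, so the
	 * offset into the segment is 253.
	 */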
#endif
}

ATF_TC(vm_physseg_find_invalid);
ATF_TC_HEAD(vm_physseg_find_invalid, tc)
{
	atf_tc_set_md_var(tc, "descr", "Tests if the returned segment number \
	    is (paddr_t) -1 when a non-existent PFN is passed into \
	    uvm_physseg_find() call.");
}
ATF_TC_BODY(vm_physseg_find_invalid, tc)
{
	psize_t offset = (psize_t) -1;

	setup();
	uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
	    VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);

	/* No segment over 3 MB exists at the moment */
	ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID,
	    uvm_physseg_find(atop(ONE_MEGABYTE * 3), NULL));

	ATF_REQUIRE_EQ((psize_t) -1, offset);

	/* No segment over 3 MB exists at the moment */
	ATF_CHECK_EQ(UVM_PHYSSEG_TYPE_INVALID,
	    uvm_physseg_find(atop(ONE_MEGABYTE * 3), &offset));

	ATF_CHECK_EQ((psize_t) -1, offset);
}

ATF_TC(uvm_page_physunload_start);
ATF_TC_HEAD(uvm_page_physunload_start, tc)
{
	atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physunload()\
	    call works without a panic(). Unloads from the start of the segment.");
}
ATF_TC_BODY(uvm_page_physunload_start, tc)
{
	/*
	 * Would uvmexp.npages reduce every time an uvm_page_physunload is called?
	 */
	psize_t npages = (VALID_END_PFN_2 - VALID_START_PFN_2);

	struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);

	paddr_t p = 0;

	uvm_physseg_t upm;

	setup();
	upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
	    VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);

	uvm_physseg_init_seg(upm, pgs);

	ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));

	/*
	 * When called for the first time, uvm_page_physunload() removes the
	 * first PFN.
	 *
	 * The new avail_start will be VALID_AVAIL_START_PFN_2 + 1.
	 */
	ATF_CHECK_EQ(VALID_START_PFN_2, atop(p));

	ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2 + 1,
	    uvm_physseg_get_avail_start(upm));

	ATF_CHECK_EQ(VALID_START_PFN_2 + 1, uvm_physseg_get_start(upm));

	/* Rest of the stuff should remain the same */
	ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
	ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
}

ATF_TC(uvm_page_physunload_end);
ATF_TC_HEAD(uvm_page_physunload_end, tc)
{
	atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physunload()\
	    call works without a panic(). Unloads from the end of the segment.");
}
ATF_TC_BODY(uvm_page_physunload_end, tc)
{
	/*
	 * Would uvmexp.npages reduce every time an uvm_page_physunload is called?
	 */
	paddr_t p = 0;

	uvm_physseg_t upm;

	setup();
	/* Note: start != avail_start to remove from end. */
	upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
	    VALID_AVAIL_START_PFN_2 + 1, VALID_AVAIL_END_PFN_2,
	    VM_FREELIST_DEFAULT);

	p = 0;

	ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);

	ATF_REQUIRE(
	    uvm_physseg_get_avail_start(upm) != uvm_physseg_get_start(upm));

	ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));

	/*
	 * Remember: if X is the upper limit, the last valid PFN is X - 1.
	 *
	 * For example, if 256 is the upper limit for 1MB of memory, the
	 * last valid PFN is 256 - 1 = 255.
	 */

	ATF_CHECK_EQ(VALID_END_PFN_2 - 1, atop(p));

	/*
	 * Because start != avail_start here, uvm_page_physunload() removes
	 * the last PFN.
	 *
	 * The new avail_end will be VALID_AVAIL_END_PFN_2 - 1.
	 * The new end will be VALID_END_PFN_2 - 1.
	 */

	ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2 - 1, uvm_physseg_get_avail_end(upm));

	ATF_CHECK_EQ(VALID_END_PFN_2 - 1, uvm_physseg_get_end(upm));

	/* Rest of the stuff should remain the same */
	ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2 + 1,
	    uvm_physseg_get_avail_start(upm));
	ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
}

ATF_TC(uvm_page_physunload_none);
ATF_TC_HEAD(uvm_page_physunload_none, tc)
{
	atf_tc_set_md_var(tc, "descr", "Tests if the basic uvm_page_physunload()\
	    call works without a panic(). Does not unload from the start or \
	    the end because start != avail_start and end != avail_end, \
	    respectively.");
}
ATF_TC_BODY(uvm_page_physunload_none, tc)
{
	psize_t npages = (VALID_END_PFN_2 - VALID_START_PFN_2);

	struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);

	paddr_t p = 0;

	uvm_physseg_t upm;

	setup();
	/*
	 * Note: start != avail_start and end != avail_end.
	 *
	 * This prevents any unload from occurring.
	 */
	upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
	    VALID_AVAIL_START_PFN_2 + 1, VALID_AVAIL_END_PFN_2 - 1,
	    VM_FREELIST_DEFAULT);

	p = 0;

	ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);

	ATF_REQUIRE(
	    uvm_physseg_get_avail_start(upm) != uvm_physseg_get_start(upm));

	uvm_physseg_init_seg(upm, pgs);

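	/*
	 * Neither end can be carved off: start != avail_start blocks
	 * removal from the front and end != avail_end blocks removal from
	 * the back, so the unload below is expected to fail.
	 */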
	ATF_CHECK_EQ(false, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));

	/* uvm_page_physunload() did not unload any memory, so p is untouched */
	ATF_CHECK_EQ(0, p);

	/* Rest of the stuff should remain the same */
	ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2 + 1,
	    uvm_physseg_get_avail_start(upm));
	ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2 - 1,
	    uvm_physseg_get_avail_end(upm));
	ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
	ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
}

ATF_TC(uvm_page_physunload_delete_start);
ATF_TC_HEAD(uvm_page_physunload_delete_start, tc)
{
	atf_tc_set_md_var(tc, "descr", "Tests if uvm_page_physunload() \
	    works when the segment becomes small enough to be deleted. \
	    NOTE: This one deletes from the start.");
}
ATF_TC_BODY(uvm_page_physunload_delete_start, tc)
{
	/*
	 * Would uvmexp.npages reduce every time an uvm_page_physunload is called?
	 */
	paddr_t p = 0;

	uvm_physseg_t upm;

	setup();

	/*
	 * Set up the nuke from the starting point.
	 */

	upm = uvm_page_physload(VALID_END_PFN_1 - 1, VALID_END_PFN_1,
	    VALID_AVAIL_END_PFN_1 - 1, VALID_AVAIL_END_PFN_1,
	    VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);

	/* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
#if VM_PHYSSEG_MAX > 1
	uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
	    VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
#endif

#if VM_PHYSSEG_MAX == 1
	atf_tc_expect_signal(SIGABRT,
	    "cannot uvm_page_physunload() the last segment");
#endif

	ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));

	ATF_CHECK_EQ(VALID_END_PFN_1 - 1, atop(p));

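	/*
	 * The first segment spanned a single page; unloading that page
	 * empties the segment, so it is deleted and only the segment
	 * loaded second remains.
	 */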
	ATF_CHECK_EQ(1, uvm_physseg_get_entries());

	/* The only node now is the one we inserted second. */
	upm = uvm_physseg_get_first();

	ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
	ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
	ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
	ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
}

ATF_TC(uvm_page_physunload_delete_end);
ATF_TC_HEAD(uvm_page_physunload_delete_end, tc)
{
	atf_tc_set_md_var(tc, "descr", "Tests if uvm_page_physunload() \
	    works when the segment becomes small enough to be deleted. \
	    NOTE: This one deletes from the end.");
}
ATF_TC_BODY(uvm_page_physunload_delete_end, tc)
{
	/*
	 * Would uvmexp.npages reduce every time an uvm_page_physunload is called?
	 */

	paddr_t p = 0;

	uvm_physseg_t upm;

	setup();

	/*
	 * Set up the nuke from the ending point.
	 */

	upm = uvm_page_physload(VALID_START_PFN_1, VALID_START_PFN_1 + 2,
	    VALID_AVAIL_START_PFN_1, VALID_AVAIL_START_PFN_1 + 2,
	    VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);

	/* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
#if VM_PHYSSEG_MAX > 1
	uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
	    VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
#endif

#if VM_PHYSSEG_MAX == 1
	atf_tc_expect_signal(SIGABRT,
	    "cannot uvm_page_physunload() the last segment");
#endif

	ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));

	ATF_CHECK_EQ(VALID_START_PFN_1, atop(p));

	p = 0;

	ATF_CHECK_EQ(true, uvm_page_physunload(upm, VM_FREELIST_DEFAULT, &p));

	ATF_CHECK_EQ(VALID_START_PFN_1 + 1, atop(p));

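	/*
	 * The two-page segment has now been unloaded completely, one page
	 * per call, so it is deleted and only the segment loaded second
	 * remains.
	 */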
	ATF_CHECK_EQ(1, uvm_physseg_get_entries());

	/* The only node now is the one we inserted second. */
	upm = uvm_physseg_get_first();

	ATF_CHECK_EQ(VALID_START_PFN_2, uvm_physseg_get_start(upm));
	ATF_CHECK_EQ(VALID_END_PFN_2, uvm_physseg_get_end(upm));
	ATF_CHECK_EQ(VALID_AVAIL_START_PFN_2, uvm_physseg_get_avail_start(upm));
	ATF_CHECK_EQ(VALID_AVAIL_END_PFN_2, uvm_physseg_get_avail_end(upm));
}

ATF_TC(uvm_page_physunload_invalid);
ATF_TC_HEAD(uvm_page_physunload_invalid, tc)
{
	atf_tc_set_md_var(tc, "descr", "Tests if uvm_page_physunload() \
	    fails when the free list does not match.");
}
ATF_TC_BODY(uvm_page_physunload_invalid, tc)
{
	psize_t npages = (VALID_END_PFN_2 - VALID_START_PFN_2);

	struct vm_page *pgs = malloc(sizeof(struct vm_page) * npages);

	paddr_t p = 0;

	uvm_physseg_t upm;

	setup();
	upm = uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
	    VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);

	uvm_physseg_init_seg(upm, pgs);

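	/*
	 * The segment was loaded onto VM_FREELIST_DEFAULT, so asking to
	 * unload from VM_FREELIST_FIRST4G should not match anything and
	 * the call is expected to return false.
	 */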
	ATF_CHECK_EQ(false, uvm_page_physunload(upm, VM_FREELIST_FIRST4G, &p));
}

ATF_TC(uvm_page_physunload_force);
ATF_TC_HEAD(uvm_page_physunload_force, tc)
{
	atf_tc_set_md_var(tc, "descr", "Tests if the basic \
	    uvm_page_physunload_force(), including the delete case, works \
	    without a panic().");
}
ATF_TC_BODY(uvm_page_physunload_force, tc)
{
	/*
	 * Would uvmexp.npages reduce every time an uvm_page_physunload is called?
	 */
	paddr_t p = 0;

	uvm_physseg_t upm;

	setup();
	upm = uvm_page_physload(VALID_START_PFN_1, VALID_END_PFN_1,
	    VALID_AVAIL_START_PFN_1, VALID_AVAIL_END_PFN_1, VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);

	/* Insert more than one segment iff VM_PHYSSEG_MAX > 1 */
#if VM_PHYSSEG_MAX > 1
	/*
	 * We do a couple of physloads here because, if all the PFNs of a
	 * range are physunloaded while only one segment exists in total,
	 * a panic() is called.
	 */
	uvm_page_physload(VALID_START_PFN_2, VALID_END_PFN_2,
	    VALID_AVAIL_START_PFN_2, VALID_AVAIL_END_PFN_2, VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(2, uvm_physseg_get_entries());
#endif

#if VM_PHYSSEG_MAX == 1
	atf_tc_expect_signal(SIGABRT,
	    "cannot uvm_page_physunload() the last segment");
#endif

	ATF_REQUIRE_EQ(VALID_AVAIL_START_PFN_1,
	    uvm_physseg_get_avail_start(upm));

	for (paddr_t i = VALID_AVAIL_START_PFN_1;
	    i < VALID_AVAIL_END_PFN_1; i++) {
		ATF_CHECK_EQ(true,
		    uvm_page_physunload_force(upm, VM_FREELIST_DEFAULT, &p));
		ATF_CHECK_EQ(i, atop(p));

		if (i + 1 < VALID_AVAIL_END_PFN_1)
			ATF_CHECK_EQ(i + 1, uvm_physseg_get_avail_start(upm));
	}
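
	/*
	 * Each forced unload above hands back the PFN at avail_start and
	 * bumps avail_start by one, which is what the checks inside the
	 * loop verify.
	 */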

	/*
	 * Now we try to retrieve the segment, which has been removed
	 * from the system through force unloading all the pages inside it.
	 */
	upm = uvm_physseg_find(VALID_AVAIL_END_PFN_1 - 1, NULL);

	/* It should no longer exist */
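	/*
	 * With UVM_HOTPLUG the handle is a pointer, so the invalid value
	 * is NULL; the legacy array-based implementation uses the index
	 * -1 instead.
	 */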
#if defined(UVM_HOTPLUG)
	ATF_CHECK_EQ(NULL, upm);
#else
	ATF_CHECK_EQ(-1, upm);
#endif

	ATF_CHECK_EQ(1, uvm_physseg_get_entries());
}

ATF_TC(uvm_page_physunload_force_invalid);
ATF_TC_HEAD(uvm_page_physunload_force_invalid, tc)
{
	atf_tc_set_md_var(tc, "descr", "Tests the invalid conditions for \
	    uvm_page_physunload_force().");
}
ATF_TC_BODY(uvm_page_physunload_force_invalid, tc)
{
	paddr_t p = 0;

	uvm_physseg_t upm;

	setup();
	upm = uvm_page_physload(VALID_START_PFN_2, VALID_START_PFN_2 + 1,
	    VALID_START_PFN_2, VALID_START_PFN_2, VM_FREELIST_DEFAULT);

	ATF_REQUIRE_EQ(1, uvm_physseg_get_entries());

	ATF_REQUIRE_EQ(0, uvmexp.npages);

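	/*
	 * The segment is loaded with avail_start == avail_end, i.e. with
	 * no available pages at all, so even a forced unload has nothing
	 * to hand out: the call should fail and p should stay untouched.
	 */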
	ATF_CHECK_EQ(false,
	    uvm_page_physunload_force(upm, VM_FREELIST_DEFAULT, &p));

	ATF_CHECK_EQ(0, p);
}

ATF_TP_ADD_TCS(tp)
{
#if defined(UVM_HOTPLUG)
	/* Internal */
	ATF_TP_ADD_TC(tp, uvm_physseg_alloc_atboot_mismatch);
	ATF_TP_ADD_TC(tp, uvm_physseg_alloc_atboot_overrun);
	ATF_TP_ADD_TC(tp, uvm_physseg_alloc_sanity);
	ATF_TP_ADD_TC(tp, uvm_physseg_free_atboot_mismatch);
	ATF_TP_ADD_TC(tp, uvm_physseg_free_sanity);
#if VM_PHYSSEG_MAX > 1
	ATF_TP_ADD_TC(tp, uvm_physseg_atboot_free_leak);
#endif
#endif /* UVM_HOTPLUG */

	ATF_TP_ADD_TC(tp, uvm_physseg_plug);
	ATF_TP_ADD_TC(tp, uvm_physseg_unplug);

	/* Exported */
	ATF_TP_ADD_TC(tp, uvm_physseg_init);
	ATF_TP_ADD_TC(tp, uvm_page_physload_preload);
	ATF_TP_ADD_TC(tp, uvm_page_physload_postboot);
	ATF_TP_ADD_TC(tp, uvm_physseg_handle_immutable);
	ATF_TP_ADD_TC(tp, uvm_physseg_seg_chomp_slab);
	ATF_TP_ADD_TC(tp, uvm_physseg_alloc_from_slab);
	ATF_TP_ADD_TC(tp, uvm_physseg_init_seg);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_start);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_start_invalid);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_end);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_end_invalid);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_avail_start);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_avail_start_invalid);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_avail_end);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_avail_end_invalid);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_next);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_next_invalid);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_prev);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_prev_invalid);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_first);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_last);
	ATF_TP_ADD_TC(tp, uvm_physseg_valid);
	ATF_TP_ADD_TC(tp, uvm_physseg_valid_invalid);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_highest);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_free_list);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_start_hint);
	ATF_TP_ADD_TC(tp, uvm_physseg_set_start_hint);
	ATF_TP_ADD_TC(tp, uvm_physseg_set_start_hint_invalid);
	ATF_TP_ADD_TC(tp, uvm_physseg_get_pg);

#ifdef __HAVE_PMAP_PHYSSEG
	ATF_TP_ADD_TC(tp, uvm_physseg_get_pmseg);
#endif
	ATF_TP_ADD_TC(tp, vm_physseg_find);
	ATF_TP_ADD_TC(tp, vm_physseg_find_invalid);

	ATF_TP_ADD_TC(tp, uvm_page_physunload_start);
	ATF_TP_ADD_TC(tp, uvm_page_physunload_end);
	ATF_TP_ADD_TC(tp, uvm_page_physunload_none);
	ATF_TP_ADD_TC(tp, uvm_page_physunload_delete_start);
	ATF_TP_ADD_TC(tp, uvm_page_physunload_delete_end);
	ATF_TP_ADD_TC(tp, uvm_page_physunload_invalid);
	ATF_TP_ADD_TC(tp, uvm_page_physunload_force);
	ATF_TP_ADD_TC(tp, uvm_page_physunload_force_invalid);

	return atf_no_error();
}