1*4bfe0439Sad /* $NetBSD: uvm_page_array.h,v 1.3 2020/05/25 21:15:10 ad Exp $ */ 2881d12e6Sad 3881d12e6Sad /*- 4881d12e6Sad * Copyright (c)2011 YAMAMOTO Takashi, 5881d12e6Sad * All rights reserved. 6881d12e6Sad * 7881d12e6Sad * Redistribution and use in source and binary forms, with or without 8881d12e6Sad * modification, are permitted provided that the following conditions 9881d12e6Sad * are met: 10881d12e6Sad * 1. Redistributions of source code must retain the above copyright 11881d12e6Sad * notice, this list of conditions and the following disclaimer. 12881d12e6Sad * 2. Redistributions in binary form must reproduce the above copyright 13881d12e6Sad * notice, this list of conditions and the following disclaimer in the 14881d12e6Sad * documentation and/or other materials provided with the distribution. 15881d12e6Sad * 16881d12e6Sad * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND 17881d12e6Sad * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 18881d12e6Sad * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 19881d12e6Sad * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE 20881d12e6Sad * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 21881d12e6Sad * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 22881d12e6Sad * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 23881d12e6Sad * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 24881d12e6Sad * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 25881d12e6Sad * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 26881d12e6Sad * SUCH DAMAGE. 27881d12e6Sad */ 28881d12e6Sad 29881d12e6Sad #if !defined(_UVM_UVM_PAGE_ARRAY_H_) 30881d12e6Sad #define _UVM_UVM_PAGE_ARRAY_H_ 31881d12e6Sad 32881d12e6Sad /* 33881d12e6Sad * uvm_page_array: an array of pages. 34881d12e6Sad * 35881d12e6Sad * these structure and functions simply manipulate struct vm_page pointers. 36881d12e6Sad * it's caller's responsibity to acquire and keep the object lock so that 37881d12e6Sad * the result is valid. 38881d12e6Sad * 39881d12e6Sad * typical usage: 40881d12e6Sad * 41881d12e6Sad * struct uvm_page_array a; 42881d12e6Sad * 43*4bfe0439Sad * uvm_page_array_init(&a, uobj, ...); 44*4bfe0439Sad * while ((pg = uvm_page_array_fill_and_peek(&a, off, 0)) != NULL) { 45881d12e6Sad * off = pg->offset + PAGE_SIZE; 46881d12e6Sad * do_something(pg); 47881d12e6Sad * uvm_page_array_advance(&a); 48881d12e6Sad * } 49881d12e6Sad * uvm_page_array_fini(&a); 50*4bfe0439Sad * 51*4bfe0439Sad * if scanning forwards the "off" argument may not go backwards. 52*4bfe0439Sad * if scanning backwards, the "off" argument may not go forwards. 53881d12e6Sad */ 54881d12e6Sad 55881d12e6Sad struct vm_page; 56881d12e6Sad 57881d12e6Sad struct uvm_page_array { 58881d12e6Sad unsigned int ar_npages; /* valid elements in ar_pages */ 59881d12e6Sad unsigned int ar_idx; /* index in ar_pages */ 60*4bfe0439Sad struct uvm_object *ar_uobj; 61*4bfe0439Sad unsigned int ar_flags; 62*4bfe0439Sad voff_t ar_lastoff; 63*4bfe0439Sad struct vm_page *ar_pages[16]; /* XXX tune */ 64881d12e6Sad }; 65881d12e6Sad 66*4bfe0439Sad void uvm_page_array_init(struct uvm_page_array *, struct uvm_object *, 67*4bfe0439Sad unsigned int); 68881d12e6Sad void uvm_page_array_fini(struct uvm_page_array *); 69881d12e6Sad void uvm_page_array_clear(struct uvm_page_array *); 70881d12e6Sad struct vm_page *uvm_page_array_peek(struct uvm_page_array *); 71881d12e6Sad void uvm_page_array_advance(struct uvm_page_array *); 72*4bfe0439Sad int uvm_page_array_fill(struct uvm_page_array *, voff_t, unsigned int); 73881d12e6Sad struct vm_page *uvm_page_array_fill_and_peek(struct uvm_page_array *, 74*4bfe0439Sad voff_t, unsigned int); 75881d12e6Sad 76881d12e6Sad /* 77881d12e6Sad * flags for uvm_page_array_fill and uvm_page_array_fill_and_peek 78881d12e6Sad */ 79881d12e6Sad #define UVM_PAGE_ARRAY_FILL_DIRTY 1 /* dirty pages */ 80881d12e6Sad #define UVM_PAGE_ARRAY_FILL_WRITEBACK 2 /* dirty or written-back */ 81881d12e6Sad #define UVM_PAGE_ARRAY_FILL_DENSE 4 /* stop on a hole */ 82881d12e6Sad #define UVM_PAGE_ARRAY_FILL_BACKWARD 8 /* descend order */ 83881d12e6Sad 84881d12e6Sad #endif /* defined(_UVM_UVM_ARRAY_H_) */ 85