Lines Matching refs:pv
981 pmap_enter_pv(struct vm_page_md *md, paddr_t pa, struct pv_entry *pv, pmap_t pm, in pmap_enter_pv() argument
988 (uintptr_t)pv, flags, 0, 0); in pmap_enter_pv()
992 pv->pv_pmap = pm; in pmap_enter_pv()
993 pv->pv_va = va; in pmap_enter_pv()
994 pv->pv_flags = flags; in pmap_enter_pv()
1011 SLIST_NEXT(pv, pv_link) = *pvp; /* add to ... */ in pmap_enter_pv()
1012 *pvp = pv; /* ... locked list */ in pmap_enter_pv()
1015 if ((pv->pv_flags & PVF_KWRITE) == PVF_KWRITE) in pmap_enter_pv()
1059 if (pv->pv_flags & PVF_WIRED) in pmap_enter_pv()
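
The pmap_enter_pv() fragments above fill in a pre-allocated pv_entry (owning pmap, virtual address, PVF_* flags) and link it onto the page's pv list through a pointer to the target slot (*pvp). Below is a minimal user-space sketch of that pattern using the SLIST macros from <sys/queue.h>; the struct pv_entry here is a simplified stand-in, not the kernel's definition, and the locking and wired/write accounting done by the real routine are omitted.

    #include <sys/queue.h>
    #include <stdint.h>

    typedef uintptr_t vaddr_t;              /* stand-in for the kernel type */
    struct pmap;                            /* opaque in these sketches */
    typedef struct pmap *pmap_t;

    struct pv_entry {                       /* simplified; not the kernel layout */
        SLIST_ENTRY(pv_entry) pv_link;      /* singly-linked list glue */
        pmap_t   pv_pmap;                   /* pmap owning this mapping */
        vaddr_t  pv_va;                     /* virtual address of the mapping */
        unsigned pv_flags;                  /* PVF_* flag bits */
    };

    SLIST_HEAD(pv_head, pv_entry);

    /*
     * Record a new pmap/va mapping of a page: fill in the caller-supplied
     * entry and link it into the slot that 'pvp' points at, which may be
     * &SLIST_FIRST(&head) or &SLIST_NEXT(prev, pv_link).
     */
    static void
    enter_pv(struct pv_entry **pvp, struct pv_entry *pv,
        pmap_t pm, vaddr_t va, unsigned flags)
    {
        pv->pv_pmap = pm;
        pv->pv_va = va;
        pv->pv_flags = flags;

        SLIST_NEXT(pv, pv_link) = *pvp;     /* add to ...          */
        *pvp = pv;                          /* ... the linked list */
    }

Passing a slot pointer rather than the list head lets the same code insert either at the front of the list or after a chosen entry, which is the shape visible in the lines above.
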
1072 struct pv_entry *pv; in pmap_find_pv() local
1074 SLIST_FOREACH(pv, &md->pvh_list, pv_link) { in pmap_find_pv()
1075 if (pm == pv->pv_pmap && va == pv->pv_va) in pmap_find_pv()
1079 return pv; in pmap_find_pv()
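
pmap_find_pv() is a straight linear search of the page's pv list for the entry matching (pm, va). A sketch of the same walk with SLIST_FOREACH, reusing the simplified pv_entry/pv_head declarations from the sketch above:

    /* Reuses the pv_entry/pv_head declarations from the sketch above. */
    #include <stddef.h>

    /*
     * Linear search of the page's pv list for the mapping at (pm, va);
     * SLIST_FOREACH leaves 'pv' as NULL when nothing matched.
     */
    static struct pv_entry *
    find_pv(struct pv_head *head, pmap_t pm, vaddr_t va)
    {
        struct pv_entry *pv;

        SLIST_FOREACH(pv, head, pv_link) {
            if (pm == pv->pv_pmap && va == pv->pv_va)
                break;
        }
        return pv;
    }
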
1099 struct pv_entry *pv, **prevptr; in pmap_remove_pv() local
1102 pv = *prevptr; in pmap_remove_pv()
1104 while (pv) { in pmap_remove_pv()
1105 if (pv->pv_pmap == pm && pv->pv_va == va) { /* match? */ in pmap_remove_pv()
1107 (uintptr_t)pm, (uintptr_t)md, pv->pv_flags, 0); in pmap_remove_pv()
1108 if (pv->pv_flags & PVF_WIRED) { in pmap_remove_pv()
1111 *prevptr = SLIST_NEXT(pv, pv_link); /* remove it! */ in pmap_remove_pv()
1114 if (pv->pv_flags & PVF_WRITE) in pmap_remove_pv()
1119 if (pv->pv_flags & PVF_WRITE) in pmap_remove_pv()
1136 } else if (pv->pv_flags & PVF_WRITE) { in pmap_remove_pv()
1144 prevptr = &SLIST_NEXT(pv, pv_link); /* previous pointer */ in pmap_remove_pv()
1145 pv = *prevptr; /* advance */ in pmap_remove_pv()
1171 return pv; in pmap_remove_pv()
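
pmap_remove_pv() unlinks the matching entry with a pointer-to-pointer walk: prevptr always addresses the slot that holds the current entry, so splicing it out is a single store and needs no separate back pointer. A sketch of just that unlink, again reusing the declarations from the first sketch; the flag and wired-count bookkeeping of the real routine is left out.

    /* Reuses the pv_entry/pv_head declarations from the first sketch. */
    #include <stddef.h>

    /*
     * Unlink and return the entry for (pm, va), or NULL if it is not on
     * the list.  'prevptr' always addresses the slot holding 'pv', so
     * removal is a single pointer store.
     */
    static struct pv_entry *
    remove_pv(struct pv_head *head, pmap_t pm, vaddr_t va)
    {
        struct pv_entry **prevptr = &SLIST_FIRST(head);
        struct pv_entry *pv = *prevptr;

        while (pv != NULL) {
            if (pv->pv_pmap == pm && pv->pv_va == va) {     /* match? */
                *prevptr = SLIST_NEXT(pv, pv_link);         /* remove it! */
                return pv;
            }
            prevptr = &SLIST_NEXT(pv, pv_link);             /* previous pointer */
            pv = *prevptr;                                  /* advance */
        }
        return NULL;
    }
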
1841 struct pv_entry *pv; in pmap_vac_me_kpmap() local
1850 SLIST_FOREACH(pv, &md->pvh_list, pv_link) { in pmap_vac_me_kpmap()
1851 if (pv->pv_pmap != pm && (pv->pv_flags & PVF_NC) == 0) in pmap_vac_me_kpmap()
1871 SLIST_FOREACH(pv, &md->pvh_list, pv_link) { in pmap_vac_me_kpmap()
1878 if (pv->pv_pmap == pm || pv->pv_pmap == last_pmap) in pmap_vac_me_kpmap()
1887 (pv->pv_flags & (PVF_NC | PVF_WRITE)) == in pmap_vac_me_kpmap()
1897 (pv->pv_flags & (PVF_NC | PVF_WRITE)) == 0) in pmap_vac_me_kpmap()
1906 pmap_vac_me_user(md, pa, (last_pmap = pv->pv_pmap), 0); in pmap_vac_me_kpmap()
1920 struct pv_entry *pv, *npv = NULL; in pmap_vac_me_user() local
1934 SLIST_FOREACH(pv, &md->pvh_list, pv_link) { in pmap_vac_me_user()
1936 if (pm == pv->pv_pmap || kpmap == pv->pv_pmap) { in pmap_vac_me_user()
1938 npv = pv; in pmap_vac_me_user()
1941 if ((pv->pv_flags & PVF_NC) == 0) { in pmap_vac_me_user()
1943 if (kpmap == pv->pv_pmap) in pmap_vac_me_user()
1948 if (pv->pv_flags & PVF_WRITE) in pmap_vac_me_user()
1950 } else if (pv->pv_flags & PVF_WRITE) in pmap_vac_me_user()
1966 for (pv = npv; pv; pv = SLIST_NEXT(pv, pv_link)) { in pmap_vac_me_user()
1967 if ((pm != pv->pv_pmap && kpmap != pv->pv_pmap) || in pmap_vac_me_user()
1968 (pv->pv_flags & PVF_NC)) in pmap_vac_me_user()
1971 pv->pv_flags |= PVF_NC; in pmap_vac_me_user()
1974 = pmap_get_l2_bucket(pv->pv_pmap, pv->pv_va); in pmap_vac_me_user()
1977 = &l2b->l2b_kva[l2pte_index(pv->pv_va)]; in pmap_vac_me_user()
1981 if ((va != pv->pv_va || pm != pv->pv_pmap) in pmap_vac_me_user()
1983 pmap_cache_wbinv_page(pv->pv_pmap, pv->pv_va, in pmap_vac_me_user()
1984 true, pv->pv_flags); in pmap_vac_me_user()
1985 pmap_tlb_flush_SE(pv->pv_pmap, pv->pv_va, in pmap_vac_me_user()
1986 pv->pv_flags); in pmap_vac_me_user()
1990 PTE_SYNC_CURRENT(pv->pv_pmap, ptep); in pmap_vac_me_user()
1998 for (pv = npv; pv; pv = SLIST_NEXT(pv, pv_link)) { in pmap_vac_me_user()
1999 if (!(pv->pv_flags & PVF_NC) || (pm != pv->pv_pmap && in pmap_vac_me_user()
2000 (kpmap != pv->pv_pmap || other_writable))) in pmap_vac_me_user()
2003 pv->pv_flags &= ~PVF_NC; in pmap_vac_me_user()
2006 = pmap_get_l2_bucket(pv->pv_pmap, pv->pv_va); in pmap_vac_me_user()
2009 = &l2b->l2b_kva[l2pte_index(pv->pv_va)]; in pmap_vac_me_user()
2015 pmap_tlb_flush_SE(pv->pv_pmap, pv->pv_va, in pmap_vac_me_user()
2016 pv->pv_flags); in pmap_vac_me_user()
2020 PTE_SYNC_CURRENT(pv->pv_pmap, ptep); in pmap_vac_me_user()
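
pmap_vac_me_kpmap() and pmap_vac_me_user() manage virtually-indexed cache aliasing for the page: when writable cacheable mappings coexist with other mappings, conflicting entries are marked PVF_NC (non-cacheable) and their PTEs, cache lines and TLB entries are fixed up; once the conflict is gone the mark is cleared again. The sketch below shows only the flag-level shape of that decision, with illustrative PVF_* values; it deliberately omits the PTE rewrites, cache write-back/invalidate and TLB flushes that make up most of the real code, and reuses the declarations from the first sketch.

    /* Reuses the pv_entry/pv_head declarations from the first sketch. */
    #include <stdbool.h>

    #define PVF_WRITE   0x01u               /* illustrative values only */
    #define PVF_NC      0x02u

    /* Mark every mapping (non-)cacheable; the real code also rewrites the
     * PTEs and flushes cache/TLB per entry. */
    static void
    set_noncacheable(struct pv_head *head, bool make_nc)
    {
        struct pv_entry *pv;

        SLIST_FOREACH(pv, head, pv_link) {
            if (make_nc)
                pv->pv_flags |= PVF_NC;
            else
                pv->pv_flags &= ~PVF_NC;
        }
    }

    /*
     * The classic VIVT alias condition: more than one mapping of the page
     * with at least one of them writable means the mappings must be made
     * non-cacheable; otherwise caching can be restored.
     */
    static void
    fix_cache_aliases(struct pv_head *head)
    {
        struct pv_entry *pv;
        int mappings = 0, writable = 0;

        SLIST_FOREACH(pv, head, pv_link) {
            mappings++;
            if (pv->pv_flags & PVF_WRITE)
                writable++;
        }
        set_noncacheable(head, mappings > 1 && writable > 0);
    }
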
2032 struct pv_entry *pv; in pmap_vac_me_harder() local
2062 pv = SLIST_FIRST(&md->pvh_list); in pmap_vac_me_harder()
2067 KASSERT(pv); in pmap_vac_me_harder()
2068 tst_mask = pv->pv_va; in pmap_vac_me_harder()
2069 pv = SLIST_NEXT(pv, pv_link); in pmap_vac_me_harder()
2076 for (; pv && !bad_alias; pv = SLIST_NEXT(pv, pv_link)) { in pmap_vac_me_harder()
2078 if (tst_mask != (pv->pv_va & arm_cache_prefer_mask)) in pmap_vac_me_harder()
2091 for (; pv; pv = SLIST_NEXT(pv, pv_link)) { in pmap_vac_me_harder()
2092 if (tst_mask != (pv->pv_va & arm_cache_prefer_mask)) { in pmap_vac_me_harder()
2103 if (!bad_alias && pv != NULL) in pmap_vac_me_harder()
2112 SLIST_FOREACH(pv, &md->pvh_list, pv_link) in pmap_vac_me_harder()
2113 KDASSERT(((tst_mask ^ pv->pv_va) & arm_cache_prefer_mask) == 0); in pmap_vac_me_harder()
2144 pv = SLIST_FIRST(&md->pvh_list); in pmap_vac_me_harder()
2145 tst_mask = (md->pvh_attrs ^ pv->pv_va) in pmap_vac_me_harder()
2198 SLIST_FOREACH(pv, &md->pvh_list, pv_link) in pmap_vac_me_harder()
2199 KDASSERT(((tst_mask ^ pv->pv_va) & arm_cache_prefer_mask) == 0); in pmap_vac_me_harder()
2259 pv = SLIST_FIRST(&md->pvh_list); in pmap_vac_me_harder()
2261 KASSERT(pv); in pmap_vac_me_harder()
2267 if (SLIST_NEXT(pv, pv_link) == NULL) { in pmap_vac_me_harder()
2292 SLIST_FOREACH(pv, &md->pvh_list, pv_link) { in pmap_vac_me_harder()
2293 struct l2_bucket * const l2b = pmap_get_l2_bucket(pv->pv_pmap, in pmap_vac_me_harder()
2294 pv->pv_va); in pmap_vac_me_harder()
2296 pt_entry_t * const ptep = &l2b->l2b_kva[l2pte_index(pv->pv_va)]; in pmap_vac_me_harder()
2300 pv->pv_flags |= PVF_NC; in pmap_vac_me_harder()
2302 pv->pv_flags &= ~PVF_NC; in pmap_vac_me_harder()
2310 pmap_tlb_flush_SE(pv->pv_pmap, pv->pv_va, pv->pv_flags); in pmap_vac_me_harder()
2314 PTE_SYNC_CURRENT(pv->pv_pmap, ptep); in pmap_vac_me_harder()
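
The arm_cache_prefer_mask tests in pmap_vac_me_harder() check whether every mapping of the page lands on the same cache colour: the expression ((tst_mask ^ pv->pv_va) & arm_cache_prefer_mask) == 0 seen above. A small sketch of that colour comparison, with prefer_mask standing in for arm_cache_prefer_mask and the declarations reused from the first sketch:

    /* Reuses the pv_entry/pv_head declarations from the first sketch. */
    #include <stdbool.h>

    /*
     * Two virtual addresses alias harmlessly in a VIPT cache only when the
     * bits selected by the prefer mask agree, i.e. they share a cache
     * colour.  'prefer_mask' stands in for arm_cache_prefer_mask.
     */
    static bool
    same_cache_colour(vaddr_t va1, vaddr_t va2, vaddr_t prefer_mask)
    {
        return ((va1 ^ va2) & prefer_mask) == 0;
    }

    /* True when every mapping on the list shares the first mapping's colour. */
    static bool
    all_same_colour(struct pv_head *head, vaddr_t prefer_mask)
    {
        struct pv_entry *pv = SLIST_FIRST(head);
        vaddr_t tst_mask;

        if (pv == NULL)
            return true;
        tst_mask = pv->pv_va;
        SLIST_FOREACH(pv, head, pv_link) {
            if (!same_cache_colour(tst_mask, pv->pv_va, prefer_mask))
                return false;                   /* bad alias */
        }
        return true;
    }
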
2329 struct pv_entry *pv; in pmap_clearbit() local
2377 for (pv = SLIST_FIRST(&md->pvh_list); pv != NULL;) { in pmap_clearbit()
2378 pmap_t pm = pv->pv_pmap; in pmap_clearbit()
2379 const vaddr_t va = pv->pv_va; in pmap_clearbit()
2380 const u_int oflags = pv->pv_flags; in pmap_clearbit()
2386 pv = SLIST_NEXT(pv, pv_link); in pmap_clearbit()
2409 pv = SLIST_FIRST(&md->pvh_list); in pmap_clearbit()
2412 pv->pv_flags &= ~maskbits; in pmap_clearbit()
2431 (uintptr_t)pv, (uintptr_t)pm, va, oflags); in pmap_clearbit()
2451 pv->pv_flags &= ~PVF_NC; in pmap_clearbit()
2540 pv = SLIST_NEXT(pv, pv_link); in pmap_clearbit()
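
pmap_clearbit() walks every mapping of the page and clears the requested PVF_* bits from pv_flags; the surrounding code (not all of it listed) also downgrades the PTEs and may restart from SLIST_FIRST after dropping locks. A flag-only sketch of the walk, reusing the declarations from the first sketch:

    /* Reuses the pv_entry/pv_head declarations from the first sketch. */

    /*
     * Clear 'maskbits' in every mapping of the page and return the union of
     * the flags that were set beforehand.  The kernel routine additionally
     * downgrades each PTE and flushes the TLB as required.
     */
    static unsigned
    clear_bits(struct pv_head *head, unsigned maskbits)
    {
        struct pv_entry *pv;
        unsigned oflags = 0;

        SLIST_FOREACH(pv, head, pv_link) {
            oflags |= pv->pv_flags;
            pv->pv_flags &= ~maskbits;
        }
        return oflags;
    }
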
2589 struct pv_entry *pv; in pmap_clean_page() local
2601 SLIST_FOREACH(pv, &md->pvh_list, pv_link) { in pmap_clean_page()
2602 if (pmap_is_current(pv->pv_pmap)) { in pmap_clean_page()
2603 flags |= pv->pv_flags; in pmap_clean_page()
2608 if (pv->pv_flags & PVF_NC) { in pmap_clean_page()
2613 } else if (is_src && (pv->pv_flags & PVF_WRITE) == 0) in pmap_clean_page()
2619 page_to_clean = pv->pv_va; in pmap_clean_page()
2620 pm_to_clean = pv->pv_pmap; in pmap_clean_page()
2849 struct pv_entry *pv; in pmap_page_remove() local
2894 for (pv = *pvp; pv != NULL;) { in pmap_page_remove()
2895 pmap_t pm = pv->pv_pmap; in pmap_page_remove()
2902 if (pm == pmap_kernel() && PV_IS_KENTRY_P(pv->pv_flags)) { in pmap_page_remove()
2904 pvp = &SLIST_NEXT(pv, pv_link); in pmap_page_remove()
2905 pv = *pvp; in pmap_page_remove()
2929 pv = *pvp; in pmap_page_remove()
2935 if (pv->pv_flags & PVF_WRITE) in pmap_page_remove()
2942 *pvp = SLIST_NEXT(pv, pv_link); /* remove from list */ in pmap_page_remove()
2947 l2b = pmap_get_l2_bucket(pm, pv->pv_va); in pmap_page_remove()
2948 KASSERTMSG(l2b != NULL, "%#lx", pv->pv_va); in pmap_page_remove()
2950 ptep = &l2b->l2b_kva[l2pte_index(pv->pv_va)]; in pmap_page_remove()
2958 if (pv->pv_flags & PVF_WIRED) in pmap_page_remove()
2961 flags |= pv->pv_flags; in pmap_page_remove()
2970 pmap_tlb_invalidate_addr(pm, pv->pv_va); in pmap_page_remove()
2977 pool_put(&pmap_pv_pool, pv); in pmap_page_remove()
2984 pv = *pvp; in pmap_page_remove()
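
pmap_page_remove() strips the page of all of its mappings except those entered with pmap_kenter_pa() (the PV_IS_KENTRY_P() test above), unlinking each entry with the same slot-pointer technique and returning it to pmap_pv_pool. A sketch of the selective unlink, with an illustrative PVF_KENTRY flag standing in for the kernel's test and free() standing in for pool_put(); PTE teardown and TLB invalidation are omitted, and the declarations come from the first sketch.

    /* Reuses the pv_entry/pv_head declarations from the first sketch. */
    #include <stdlib.h>

    #define PVF_KENTRY  0x04u       /* illustrative stand-in for PV_IS_KENTRY_P() */

    /*
     * Drop every mapping of the page except the pmap_kenter_pa() ones,
     * which stay on the list.  Unlinking uses the same slot-pointer walk
     * as remove_pv(); free() stands in for pool_put().
     */
    static void
    page_remove(struct pv_head *head)
    {
        struct pv_entry **pvp = &SLIST_FIRST(head);
        struct pv_entry *pv = *pvp;

        while (pv != NULL) {
            if (pv->pv_flags & PVF_KENTRY) {
                pvp = &SLIST_NEXT(pv, pv_link);     /* keep it, step over */
                pv = *pvp;
                continue;
            }
            *pvp = SLIST_NEXT(pv, pv_link);         /* remove from list */
            free(pv);
            pv = *pvp;
        }
    }
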
3265 struct pv_entry *pv; in pmap_enter() local
3279 pv = pmap_remove_pv(omd, opa, pm, va); in pmap_enter()
3281 oflags = pv->pv_flags; in pmap_enter()
3296 pv = new_pv; in pmap_enter()
3298 if (pv == NULL) { in pmap_enter()
3314 pmap_enter_pv(md, pa, pv, pm, va, nflags); in pmap_enter()
3500 struct pv_entry *pv, *npv; in pmap_remove() local
3573 pv = pmap_remove_pv(md, pa, pm, sva); in pmap_remove()
3576 if (pv != NULL) { in pmap_remove()
3578 flags = pv->pv_flags; in pmap_remove()
3581 pv, pv_link); in pmap_remove()
3685 SLIST_FOREACH_SAFE(pv, &opv_list, pv_link, npv) { in pmap_remove()
3686 pool_put(&pmap_pv_pool, pv); in pmap_remove()
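
In pmap_remove(), entries taken off pages during the page-table walk are not freed immediately: they are stashed on a local opv_list and returned to pmap_pv_pool in one pass at the end (the SLIST_FOREACH_SAFE loop above). A sketch of that defer-then-free pattern; SLIST_FOREACH_SAFE is the NetBSD <sys/queue.h> macro (some other copies of the header lack it), free() stands in for pool_put(), and the declarations come from the first sketch.

    /* Reuses the pv_entry/pv_head declarations from the first sketch. */
    #include <stdlib.h>

    /* Entries removed during the page-table walk are parked here ... */
    static void
    stash_removed(struct pv_head *opv_list, struct pv_entry *pv)
    {
        SLIST_INSERT_HEAD(opv_list, pv, pv_link);
    }

    /* ... and only handed back to the allocator once the walk is finished,
     * so no allocator call is made while the pmap is being modified. */
    static void
    drain_removed(struct pv_head *opv_list)
    {
        struct pv_entry *pv, *npv;

        SLIST_FOREACH_SAFE(pv, opv_list, pv_link, npv) {
            free(pv);                       /* pool_put() in the kernel */
        }
        SLIST_INIT(opv_list);
    }
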
3696 struct pv_entry *pv; in pmap_kremove_pg() local
3702 pv = pmap_remove_pv(md, pa, pmap_kernel(), va); in pmap_kremove_pg()
3703 KASSERTMSG(pv, "pg %p (pa #%lx) va %#lx", pg, pa, va); in pmap_kremove_pg()
3704 KASSERT(PV_IS_KENTRY_P(pv->pv_flags)); in pmap_kremove_pg()
3712 && (pv->pv_flags & PVF_WRITE) != 0) { in pmap_kremove_pg()
3723 return pv; in pmap_kremove_pg()
3744 struct pv_entry *pv = NULL; in pmap_kenter_pa() local
3790 pv = pmap_kremove_pg(opg, va); in pmap_kenter_pa()
3841 KASSERT(pv == NULL); in pmap_kenter_pa()
3870 if (pv == NULL) { in pmap_kenter_pa()
3871 pv = pool_get(&pmap_pv_pool, PR_NOWAIT); in pmap_kenter_pa()
3872 KASSERT(pv != NULL); in pmap_kenter_pa()
3875 pmap_enter_pv(md, pa, pv, pmap_kernel(), va, in pmap_kenter_pa()
3888 if (pv != NULL) in pmap_kenter_pa()
3889 pool_put(&pmap_pv_pool, pv); in pmap_kenter_pa()
4407 struct pv_entry * const pv = pmap_find_pv(md, pm, va); in pmap_prefetchabt_fixup() local
4408 KASSERT(pv != NULL); in pmap_prefetchabt_fixup()
4410 if (PV_IS_EXEC_P(pv->pv_flags)) { in pmap_prefetchabt_fixup()
4509 struct pv_entry *pv; in pmap_fault_fixup() local
4522 pv = pmap_find_pv(md, pm, va); in pmap_fault_fixup()
4523 if (pv == NULL || PV_IS_KENTRY_P(pv->pv_flags)) { in pmap_fault_fixup()
4536 if ((pv->pv_flags & PVF_WRITE) == 0) { in pmap_fault_fixup()
4543 pv->pv_flags |= PVF_REF | PVF_MOD; in pmap_fault_fixup()
4599 struct pv_entry *pv = pmap_find_pv(md, pm, va); in pmap_fault_fixup() local
4600 if (pv == NULL || PV_IS_KENTRY_P(pv->pv_flags)) { in pmap_fault_fixup()
4607 pv->pv_flags |= PVF_REF; in pmap_fault_fixup()
4624 if ((pv->pv_flags & PVF_EXEC) == 0) { in pmap_fault_fixup()
4665 struct pv_entry * const pv = pmap_find_pv(md, pm, va); in pmap_fault_fixup() local
4666 if (pv == NULL || (pv->pv_flags & PVF_EXEC) == 0) { in pmap_fault_fixup()
5958 pv_addr_t pv; in pmap_grow_map() local
5966 &pv); in pmap_grow_map()
5967 pa = pv.pv_pa; in pmap_grow_map()
6926 pv_addr_t *pv; in kernel_pt_lookup() local
6928 SLIST_FOREACH(pv, &kernel_pt_list, pv_list) { in kernel_pt_lookup()
6929 if (pv->pv_pa == (pa & ~PGOFSET)) in kernel_pt_lookup()
6930 return pv->pv_va | (pa & PGOFSET); in kernel_pt_lookup()
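
kernel_pt_lookup() answers a different question: given the physical address of a bootstrap page table, find its virtual address by scanning a short list of pv_addr_t records, matching on the page-aligned PA and re-applying the in-page offset. A self-contained sketch with a simplified pv_addr record and an assumed 4 KiB PGOFSET:

    #include <sys/queue.h>
    #include <stdint.h>

    typedef uintptr_t paddr_t, vaddr_t;     /* stand-ins for the kernel types */
    #define PGOFSET     (4096 - 1)          /* assumes 4 KiB pages */

    struct pv_addr {                        /* simplified pv_addr_t */
        SLIST_ENTRY(pv_addr) pv_list;
        paddr_t pv_pa;
        vaddr_t pv_va;
    };
    SLIST_HEAD(pv_addr_head, pv_addr);

    /*
     * Translate the physical address of a bootstrap page table back to its
     * virtual address: match on the page-aligned PA, then re-apply the
     * offset within the page.  Returns 0 when the PA is not on the list.
     */
    static vaddr_t
    pt_lookup(struct pv_addr_head *list, paddr_t pa)
    {
        struct pv_addr *pv;

        SLIST_FOREACH(pv, list, pv_list) {
            if (pv->pv_pa == (pa & ~(paddr_t)PGOFSET))
                return pv->pv_va | (pa & PGOFSET);
        }
        return 0;
    }
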
8034 struct pv_entry *pv; in pmap_dump_ncpg() local
8051 SLIST_FOREACH(pv, &md->pvh_list, pv_link) { in pmap_dump_ncpg()
8053 (pm == pv->pv_pmap) ? '*' : ' ', in pmap_dump_ncpg()
8054 pv->pv_va, pv->pv_flags); in pmap_dump_ncpg()
8064 pv_addr_t *pv, *npv; in pmap_boot_pageadd() local
8066 if ((pv = SLIST_FIRST(&pmap_boot_freeq)) != NULL) { in pmap_boot_pageadd()
8067 if (newpv->pv_pa < pv->pv_va) { in pmap_boot_pageadd()
8068 KASSERT(newpv->pv_pa + newpv->pv_size <= pv->pv_pa); in pmap_boot_pageadd()
8069 if (newpv->pv_pa + newpv->pv_size == pv->pv_pa) { in pmap_boot_pageadd()
8070 newpv->pv_size += pv->pv_size; in pmap_boot_pageadd()
8073 pv = NULL; in pmap_boot_pageadd()
8075 for (; (npv = SLIST_NEXT(pv, pv_list)) != NULL; in pmap_boot_pageadd()
8076 pv = npv) { in pmap_boot_pageadd()
8077 KASSERT(pv->pv_pa + pv->pv_size < npv->pv_pa); in pmap_boot_pageadd()
8078 KASSERT(pv->pv_pa < newpv->pv_pa); in pmap_boot_pageadd()
8081 if (pv->pv_pa + pv->pv_size == newpv->pv_pa) { in pmap_boot_pageadd()
8082 pv->pv_size += newpv->pv_size; in pmap_boot_pageadd()
8088 SLIST_INSERT_AFTER(pv, newpv, pv_list); in pmap_boot_pageadd()
8095 if (pv) { in pmap_boot_pageadd()
8096 SLIST_INSERT_AFTER(pv, newpv, pv_list); in pmap_boot_pageadd()
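
pmap_boot_pageadd() maintains the boot-time free list (pmap_boot_freeq) sorted by physical address, merging a newly returned range with the neighbour below and/or above it whenever the ranges touch. The sketch below mirrors that insert-and-coalesce shape with a simplified descriptor; it assumes the usual invariants (non-overlapping, already-coalesced list, VA and PA advancing together) and drops the kernel's extra assertions. It reuses the paddr_t/vaddr_t stand-ins from the previous sketch.

    /* Reuses <sys/queue.h> and the paddr_t/vaddr_t stand-ins from the sketch above. */
    #include <assert.h>
    #include <stddef.h>

    struct boot_range {                     /* simplified pv_addr_t with a size */
        SLIST_ENTRY(boot_range) pv_list;
        paddr_t pv_pa;
        vaddr_t pv_va;
        size_t  pv_size;
    };
    SLIST_HEAD(boot_freeq, boot_range);

    /*
     * Insert 'newr' into the PA-sorted free list, coalescing it with the
     * range below and/or above whenever the ranges are contiguous.  When a
     * merge happens, 'newr' (or the swallowed neighbour) is simply dropped
     * from the list rather than linked in.
     */
    static void
    boot_pageadd(struct boot_freeq *q, struct boot_range *newr)
    {
        struct boot_range *r, *nr;

        r = SLIST_FIRST(q);
        if (r == NULL || newr->pv_pa + newr->pv_size <= r->pv_pa) {
            /* New head; absorb the old head if it now touches. */
            if (r != NULL && newr->pv_pa + newr->pv_size == r->pv_pa) {
                newr->pv_size += r->pv_size;
                SLIST_REMOVE_HEAD(q, pv_list);
            }
            SLIST_INSERT_HEAD(q, newr, pv_list);
            return;
        }

        /* Find the last range that starts below the new one. */
        while ((nr = SLIST_NEXT(r, pv_list)) != NULL && nr->pv_pa < newr->pv_pa)
            r = nr;
        assert(r->pv_pa + r->pv_size <= newr->pv_pa);   /* no overlap allowed */

        if (r->pv_pa + r->pv_size == newr->pv_pa) {
            r->pv_size += newr->pv_size;                /* merge downwards */
            if (nr != NULL && r->pv_pa + r->pv_size == nr->pv_pa) {
                r->pv_size += nr->pv_size;              /* gap closed: merge up too */
                SLIST_REMOVE(q, nr, boot_range, pv_list);
            }
        } else {
            if (nr != NULL && newr->pv_pa + newr->pv_size == nr->pv_pa) {
                newr->pv_size += nr->pv_size;           /* merge upwards */
                SLIST_REMOVE(q, nr, boot_range, pv_list);
            }
            SLIST_INSERT_AFTER(r, newr, pv_list);
        }
    }
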
8106 pv_addr_t *pv, **pvp; in pmap_boot_pagealloc() local
8114 (pv = *pvp) != NULL; in pmap_boot_pagealloc()
8115 pvp = &SLIST_NEXT(pv, pv_list)) { in pmap_boot_pagealloc()
8121 KASSERT(pv->pv_size > 0); in pmap_boot_pagealloc()
8122 if (pv->pv_size < amount) in pmap_boot_pagealloc()
8126 if (((pv->pv_pa + off) & mask) == match in pmap_boot_pagealloc()
8127 && off + amount <= pv->pv_size) in pmap_boot_pagealloc()
8133 rpv->pv_va = pv->pv_va + off; in pmap_boot_pagealloc()
8134 rpv->pv_pa = pv->pv_pa + off; in pmap_boot_pagealloc()
8136 pv->pv_size -= amount; in pmap_boot_pagealloc()
8137 if (pv->pv_size == 0) { in pmap_boot_pagealloc()
8139 KASSERT((vaddr_t) pv == rpv->pv_va); in pmap_boot_pagealloc()
8140 *pvp = SLIST_NEXT(pv, pv_list); in pmap_boot_pagealloc()
8142 KASSERT((vaddr_t) pv == rpv->pv_va); in pmap_boot_pagealloc()
8144 *newpv = *pv; in pmap_boot_pagealloc()
8148 } else if (off < pv->pv_size) { in pmap_boot_pagealloc()
8150 *newpv = *pv; in pmap_boot_pagealloc()
8155 SLIST_NEXT(pv, pv_list) = newpv; in pmap_boot_pagealloc()
8156 pv->pv_size = off; in pmap_boot_pagealloc()
8158 KASSERT((vaddr_t) pv != rpv->pv_va); in pmap_boot_pagealloc()
8168 (pv = *pvp) != NULL; in pmap_boot_pagealloc()
8169 pvp = &SLIST_NEXT(pv, pv_list)) { in pmap_boot_pagealloc()
8170 if (SLIST_NEXT(pv, pv_list) == NULL) in pmap_boot_pagealloc()
8181 if (spn == atop(pv->pv_pa + pv->pv_size) in pmap_boot_pagealloc()
8182 && pv->pv_va + pv->pv_size <= ptoa(epn)) { in pmap_boot_pagealloc()
8183 rpv->pv_va = pv->pv_va; in pmap_boot_pagealloc()
8184 rpv->pv_pa = pv->pv_pa; in pmap_boot_pagealloc()
8188 ptoa(spn) + (pv->pv_va - pv->pv_pa), in pmap_boot_pagealloc()
8190 amount - pv->pv_size, in pmap_boot_pagealloc()
8194 uvm_physseg_unplug(spn, atop(amount - pv->pv_size)); in pmap_boot_pagealloc()
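
pmap_boot_pagealloc() carves an allocation out of that free list: a first-fit scan for a range with a page offset whose physical address satisfies the (mask, match) alignment constraint, after which the range is consumed whole, shrunk from the front or back, or split in the middle. The sketch below follows that shape but takes a caller-supplied spare descriptor for the split case and leaves out the fallback visible above that grows the last segment by unplugging pages from uvm_physseg; the page size and types are assumptions carried over from the previous sketches.

    /* Reuses the boot_range/boot_freeq declarations from the sketch above. */
    #include <stdbool.h>
    #include <stddef.h>

    #define BOOT_PAGE_SIZE  4096            /* assumed page size */

    /*
     * First-fit, alignment-aware carve from the boot free list.  'mask' and
     * 'match' express the physical-alignment constraint as in the lines
     * above; 'spare' is a caller-supplied descriptor used only when the
     * allocation splits a range in the middle.  On success *rpv describes
     * the allocation.
     */
    static bool
    boot_pagealloc(struct boot_freeq *q, size_t amount,
        paddr_t mask, paddr_t match,
        struct boot_range *rpv, struct boot_range *spare)
    {
        struct boot_range **pvp, *pv;
        size_t off;

        for (pvp = &SLIST_FIRST(q); (pv = *pvp) != NULL;
            pvp = &SLIST_NEXT(pv, pv_list)) {
            if (pv->pv_size < amount)
                continue;

            /* Find the first page offset that satisfies the alignment. */
            for (off = 0; off + amount <= pv->pv_size; off += BOOT_PAGE_SIZE) {
                if (((pv->pv_pa + off) & mask) == match)
                    break;
            }
            if (off + amount > pv->pv_size)
                continue;                   /* no aligned slot in this range */

            rpv->pv_pa = pv->pv_pa + off;
            rpv->pv_va = pv->pv_va + off;
            rpv->pv_size = amount;

            if (off == 0 && amount == pv->pv_size) {
                *pvp = SLIST_NEXT(pv, pv_list);     /* exact fit: unlink */
            } else if (off == 0) {
                pv->pv_pa += amount;                /* took the front: shrink */
                pv->pv_va += amount;
                pv->pv_size -= amount;
            } else if (off + amount == pv->pv_size) {
                pv->pv_size = off;                  /* took the tail: truncate */
            } else {
                *spare = *pv;                       /* took the middle: split */
                spare->pv_pa += off + amount;
                spare->pv_va += off + amount;
                spare->pv_size -= off + amount;
                SLIST_INSERT_AFTER(pv, spare, pv_list);
                pv->pv_size = off;
            }
            return true;
        }
        return false;                       /* nothing large enough and aligned */
    }

pmap_steal_memory(), at the end of the listing, is the unconstrained caller: it passes 0 for both mask and match, which offset 0 always satisfies, so it simply takes the front of the first large-enough range.
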
8206 pv_addr_t pv; in pmap_steal_memory() local
8208 pmap_boot_pagealloc(size, 0, 0, &pv); in pmap_steal_memory()
8210 return pv.pv_va; in pmap_steal_memory()