Lines matching refs:sva — every reference to sva, the conventional "start virtual address" parameter, in FreeBSD's POWER radix-MMU pmap (the mmu_radix_*() function names place these matches in mmu_radix.c)
570 static int pmap_remove_l3e(pmap_t pmap, pml3_entry_t *pdq, vm_offset_t sva,
572 static int pmap_remove_pte(pmap_t pmap, pt_entry_t *ptq, vm_offset_t sva,
577 static bool pmap_remove_ptes(pmap_t pmap, vm_offset_t sva, vm_offset_t eva,
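
The three prototypes above are the removal helpers that the rest of the matches lean on. Reconstructed from the call sites at lines 5207, 5306 and 5325 below, the call shape is:

    mmu_radix_remove(pmap, sva, eva)                  /* range entry point  */
      -> pmap_remove_l3e(pmap, l3e, sva, ...)         /* whole 2 MB mapping */
      -> pmap_remove_ptes(pmap, sva, va_next, ...)    /* run of 4 KB PTEs   */
           -> pmap_remove_pte(pmap, pte, sva, ...)    /* one 4 KB PTE       */
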
2218 mmu_radix_advise(pmap_t pmap, vm_offset_t sva, vm_offset_t eva, in mmu_radix_advise() argument
2234 for (; sva < eva; sva = va_next) { in mmu_radix_advise()
2235 l1e = pmap_pml1e(pmap, sva); in mmu_radix_advise()
2237 va_next = (sva + L1_PAGE_SIZE) & ~L1_PAGE_MASK; in mmu_radix_advise()
2238 if (va_next < sva) in mmu_radix_advise()
2242 l2e = pmap_l1e_to_l2e(l1e, sva); in mmu_radix_advise()
2244 va_next = (sva + L2_PAGE_SIZE) & ~L2_PAGE_MASK; in mmu_radix_advise()
2245 if (va_next < sva) in mmu_radix_advise()
2249 va_next = (sva + L3_PAGE_SIZE) & ~L3_PAGE_MASK; in mmu_radix_advise()
2250 if (va_next < sva) in mmu_radix_advise()
2252 l3e = pmap_l2e_to_l3e(l2e, sva); in mmu_radix_advise()
2260 if (!pmap_demote_l3e_locked(pmap, l3e, sva, &lock)) { in mmu_radix_advise()
2284 KASSERT(va >= sva, in mmu_radix_advise()
2299 for (pte = pmap_l3e_to_pte(l3e, sva); sva != va_next; in mmu_radix_advise()
2300 pte++, sva += PAGE_SIZE) { in mmu_radix_advise()
2301 MPASS(pte == pmap_pte(pmap, sva)); in mmu_radix_advise()
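
mmu_radix_advise() is the first of four functions in these matches (with mmu_radix_protect(), mmu_radix_remove() and mmu_radix_unwire()) sharing the same walk: chop [sva, eva) at L1/L2/L3 boundaries and guard the boundary arithmetic against address-space wraparound. A minimal userland sketch of the idiom; the 2 MB constant is illustrative, and the clamp to eva is the (unmatched) line that each `if (va_next < sva)` above leads into, following FreeBSD's standard pmap pattern:

    #include <inttypes.h>
    #include <stdio.h>

    #define L3_PAGE_SIZE ((uint64_t)1 << 21)    /* 2 MB, for illustration */
    #define L3_PAGE_MASK (L3_PAGE_SIZE - 1)

    static void
    walk(uint64_t sva, uint64_t eva)
    {
        uint64_t va_next;

        for (; sva < eva; sva = va_next) {
            /* Advance to the next 2 MB boundary... */
            va_next = (sva + L3_PAGE_SIZE) & ~L3_PAGE_MASK;
            /* ...clamping on wraparound so a range ending at the very
             * top of the address space still terminates the loop. */
            if (va_next < sva)
                va_next = eva;
            printf("chunk [%#" PRIx64 ", %#" PRIx64 ")\n", sva, va_next);
        }
    }

    int
    main(void)
    {
        walk(0xffffffffffd00000ULL, 0xffffffffffffffffULL);
        return (0);
    }
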
4502 pmap_protect_l3e(pmap_t pmap, pt_entry_t *l3e, vm_offset_t sva, vm_prot_t prot) in pmap_protect_l3e() argument
4510 KASSERT((sva & L3_PAGE_MASK) == 0, in pmap_protect_l3e()
4517 eva = sva + L3_PAGE_SIZE; in pmap_protect_l3e()
4518 for (va = sva, m = PHYS_TO_VM_PAGE(oldpde & PG_PS_FRAME); in pmap_protect_l3e()
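
Line 4518 shows the superpage trick used by several helpers here: vm_page structures for physically contiguous memory are themselves contiguous in vm_page_array, so the 512 constituent 4 KB pages of a 2 MB mapping can be visited by bumping a vm_page pointer in lockstep with the virtual address. A self-contained sketch with a stand-in vm_page type:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE 4096u
    #define NPTEPG    512              /* 4 KB pages per 2 MB superpage */

    struct vm_page {                   /* stand-in for the kernel's vm_page */
        int dirty;
    };

    static struct vm_page vm_page_array[NPTEPG];

    int
    main(void)
    {
        uint64_t sva = 0x200000, eva = sva + (uint64_t)NPTEPG * PAGE_SIZE;
        uint64_t va;
        struct vm_page *m;

        /* m++ tracks va += PAGE_SIZE exactly, one struct per 4 KB page. */
        for (va = sva, m = &vm_page_array[0]; va < eva;
            va += PAGE_SIZE, m++)
            m->dirty = 1;
        printf("marked %d pages dirty\n", NPTEPG);
        return (0);
    }
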
4542 mmu_radix_protect(pmap_t pmap, vm_offset_t sva, vm_offset_t eva, in mmu_radix_protect() argument
4552 CTR5(KTR_PMAP, "%s(%p, %#x, %#x, %#x)", __func__, pmap, sva, eva, in mmu_radix_protect()
4557 mmu_radix_remove(pmap, sva, eva); in mmu_radix_protect()
4568 pmap, sva, eva, prot, pmap->pm_pid); in mmu_radix_protect()
4573 for (; sva < eva; sva = va_next) { in mmu_radix_protect()
4574 l1e = pmap_pml1e(pmap, sva); in mmu_radix_protect()
4576 va_next = (sva + L1_PAGE_SIZE) & ~L1_PAGE_MASK; in mmu_radix_protect()
4577 if (va_next < sva) in mmu_radix_protect()
4582 l2e = pmap_l1e_to_l2e(l1e, sva); in mmu_radix_protect()
4584 va_next = (sva + L2_PAGE_SIZE) & ~L2_PAGE_MASK; in mmu_radix_protect()
4585 if (va_next < sva) in mmu_radix_protect()
4590 va_next = (sva + L3_PAGE_SIZE) & ~L3_PAGE_MASK; in mmu_radix_protect()
4591 if (va_next < sva) in mmu_radix_protect()
4594 l3e = pmap_l2e_to_l3e(l2e, sva); in mmu_radix_protect()
4611 if (sva + L3_PAGE_SIZE == va_next && eva >= va_next) { in mmu_radix_protect()
4612 if (pmap_protect_l3e(pmap, l3e, sva, prot)) in mmu_radix_protect()
4615 } else if (!pmap_demote_l3e(pmap, l3e, sva)) { in mmu_radix_protect()
4626 for (pte = pmap_l3e_to_pte(l3e, sva); sva != va_next; pte++, in mmu_radix_protect()
4627 sva += PAGE_SIZE) { in mmu_radix_protect()
4632 MPASS(pte == pmap_pte(pmap, sva)); in mmu_radix_protect()
4657 sva, obits, pbits); in mmu_radix_protect()
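
Line 4611 is the superpage decision that mmu_radix_remove() repeats at line 5305: handle the 2 MB entry as a unit only when it lies entirely inside [sva, eva); otherwise demote it to 4 KB PTEs first, and skip it if demotion fails, since the mapping is then gone. The predicate in isolation:

    #include <inttypes.h>
    #include <stdio.h>

    #define L3_PAGE_SIZE ((uint64_t)1 << 21)

    /* Take the superpage fast path only when [sva, va_next) is a full
     * 2 MB mapping and the request covers all of it. */
    static const char *
    l3e_action(uint64_t sva, uint64_t va_next, uint64_t eva)
    {
        if (sva + L3_PAGE_SIZE == va_next && eva >= va_next)
            return ("operate on the whole 2 MB entry");
        return ("demote, then walk 4 KB PTEs");
    }

    int
    main(void)
    {
        /* fully covered superpage vs. a range ending mid-superpage */
        printf("%s\n", l3e_action(0x200000, 0x400000, 0x800000));
        printf("%s\n", l3e_action(0x200000, 0x400000, 0x300000));
        return (0);
    }
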
4669 mmu_radix_qenter(vm_offset_t sva, vm_page_t *ma, int count) in mmu_radix_qenter() argument
4672 CTR4(KTR_PMAP, "%s(%#x, %p, %d)", __func__, sva, ma, count); in mmu_radix_qenter()
4680 va = sva; in mmu_radix_qenter()
4682 while (va < sva + PAGE_SIZE * count) { in mmu_radix_qenter()
4704 pmap_invalidate_range(kernel_pmap, sva, sva + count * in mmu_radix_qenter()
4711 mmu_radix_qremove(vm_offset_t sva, int count) in mmu_radix_qremove() argument
4716 CTR3(KTR_PMAP, "%s(%#x, %d)", __func__, sva, count); in mmu_radix_qremove()
4717 KASSERT(sva >= VM_MIN_KERNEL_ADDRESS, ("usermode or dmap va %lx", sva)); in mmu_radix_qremove()
4719 va = sva; in mmu_radix_qremove()
4721 while (va < sva + PAGE_SIZE * count) { in mmu_radix_qremove()
4728 pmap_invalidate_range(kernel_pmap, sva, va); in mmu_radix_qremove()
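
mmu_radix_qenter()/mmu_radix_qremove() back FreeBSD's pmap_qenter()/pmap_qremove() contract: map count pages at a kernel virtual address, later unmap them, issuing one ranged invalidation each way (lines 4704 and 4728) rather than a per-page flush. A hedged usage fragment; the surrounding allocation and the ma array are assumptions, not taken from the matches above:

    /* Map `count` physical pages at a KVA window, use them, tear down.
     * pmap_qenter()/pmap_qremove() are the MI entry points that reach
     * mmu_radix_qenter()/mmu_radix_qremove() on a radix pmap. */
    vm_offset_t kva = kva_alloc(count * PAGE_SIZE);  /* assumed setup */
    pmap_qenter(kva, ma, count);       /* ma[]: vm_page_t array to map */
    /* ... access (void *)kva .. (void *)(kva + count * PAGE_SIZE) ... */
    pmap_qremove(kva, count);          /* unmap + one ranged invalidation */
    kva_free(kva, count * PAGE_SIZE);
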
4940 vm_offset_t sva; in pmap_demote_l3e_locked() local
4969 sva = trunc_2mpage(va); in pmap_demote_l3e_locked()
4970 pmap_remove_l3e(pmap, l3e, sva, &free, lockp); in pmap_demote_l3e_locked()
4971 pmap_invalidate_l3e_page(pmap, sva, oldpde); in pmap_demote_l3e_locked()
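
Lines 4969–4971 are the demotion failure path: when pmap_demote_l3e_locked() cannot set up a 4 KB page-table page, the 2 MB mapping is removed outright rather than left half-demoted, and the whole range is flushed (this mirrors FreeBSD's amd64 pmap, which this code's structure follows). trunc_2mpage() just rounds a VA down to its containing 2 MB boundary; a stand-alone equivalent:

    #include <inttypes.h>
    #include <stdio.h>

    /* Round a virtual address down to the 2 MB superpage containing it. */
    #define L3_PAGE_MASK (((uint64_t)1 << 21) - 1)
    #define trunc_2mpage(va) ((uint64_t)(va) & ~L3_PAGE_MASK)

    int
    main(void)
    {
        printf("%#" PRIx64 "\n", trunc_2mpage(0x12345678ULL)); /* 0x12200000 */
        return (0);
    }
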
5072 pmap_remove_l3e(pmap_t pmap, pml3_entry_t *pdq, vm_offset_t sva, in pmap_remove_l3e() argument
5081 KASSERT((sva & L3_PAGE_MASK) == 0, in pmap_remove_l3e()
5090 pmap_pvh_free(pvh, pmap, sva); in pmap_remove_l3e()
5091 eva = sva + L3_PAGE_SIZE; in pmap_remove_l3e()
5092 for (va = sva, m = PHYS_TO_VM_PAGE(oldpde & PG_PS_FRAME); in pmap_remove_l3e()
5104 pmap_remove_kernel_l3e(pmap, pdq, sva); in pmap_remove_l3e()
5106 mpte = pmap_remove_pt_page(pmap, sva); in pmap_remove_l3e()
5115 return (pmap_unuse_pt(pmap, sva, be64toh(*pmap_pml2e(pmap, sva)), free)); in pmap_remove_l3e()
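
Note the be64toh() wrapper on the page-table read at line 5115 (and again at lines 5207 and 5766): POWER radix page-table entries are stored big-endian in memory, so on a little-endian kernel every load must be byte-swapped before flag bits can be tested. A tiny illustration; userland <endian.h> stands in for the kernel's <sys/endian.h>:

    #include <endian.h>        /* <sys/endian.h> on FreeBSD */
    #include <inttypes.h>
    #include <stdio.h>

    int
    main(void)
    {
        /* A PTE as it sits in the radix tree: big-endian. */
        uint64_t pte_raw = htobe64(0x8000000000000186ULL);

        /* Every pmap read is wrapped the same way: swap, then test. */
        uint64_t pte = be64toh(pte_raw);
        printf("raw=%#" PRIx64 " host=%#" PRIx64 "\n", pte_raw, pte);
        return (0);
    }
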
5185 pmap_remove_ptes(pmap_t pmap, vm_offset_t sva, vm_offset_t eva, in pmap_remove_ptes() argument
5195 for (pte = pmap_l3e_to_pte(l3e, sva); sva != eva; pte++, in pmap_remove_ptes()
5196 sva += PAGE_SIZE) { in pmap_remove_ptes()
5197 MPASS(pte == pmap_pte(pmap, sva)); in pmap_remove_ptes()
5206 va = sva; in pmap_remove_ptes()
5207 if (pmap_remove_pte(pmap, pte, sva, be64toh(*l3e), free, lockp)) { in pmap_remove_ptes()
5209 sva += PAGE_SIZE; in pmap_remove_ptes()
5216 pmap_invalidate_range(pmap, va, sva); in pmap_remove_ptes()
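
Lines 5206 and 5216 show pmap_remove_ptes() coalescing TLB shootdowns: remember where the run of removed pages began (va) and issue a single ranged flush [va, sva) at the end instead of one per 4 KB page. A simplified runnable sketch assuming one unbroken run; the real code also restarts the run when it crosses a hole:

    #include <inttypes.h>
    #include <stdio.h>

    #define PAGE_SIZE 4096ULL

    static void
    invalidate_range(uint64_t start, uint64_t end)
    {
        printf("flush [%#" PRIx64 ", %#" PRIx64 ")\n", start, end);
    }

    static void
    remove_ptes(uint64_t sva, uint64_t eva)
    {
        uint64_t va = eva;          /* sentinel: no run started yet */

        for (; sva != eva; sva += PAGE_SIZE) {
            /* ...remove the PTE at sva here... */
            if (va == eva)
                va = sva;           /* first page of the run */
        }
        if (va != eva)
            invalidate_range(va, sva);  /* one flush for the whole run */
    }

    int
    main(void)
    {
        remove_ptes(0x10000, 0x10000 + 8 * PAGE_SIZE);
        return (0);
    }
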
5221 mmu_radix_remove(pmap_t pmap, vm_offset_t sva, vm_offset_t eva) in mmu_radix_remove() argument
5231 CTR4(KTR_PMAP, "%s(%p, %#x, %#x)", __func__, pmap, sva, eva); in mmu_radix_remove()
5243 sva = (sva + PAGE_MASK) & ~PAGE_MASK; in mmu_radix_remove()
5253 if (sva + PAGE_SIZE == eva) { in mmu_radix_remove()
5254 l3e = pmap_pml3e(pmap, sva); in mmu_radix_remove()
5256 anyvalid = pmap_remove_page(pmap, sva, l3e, &free); in mmu_radix_remove()
5262 for (; sva < eva; sva = va_next) { in mmu_radix_remove()
5265 l1e = pmap_pml1e(pmap, sva); in mmu_radix_remove()
5267 va_next = (sva + L1_PAGE_SIZE) & ~L1_PAGE_MASK; in mmu_radix_remove()
5268 if (va_next < sva) in mmu_radix_remove()
5273 l2e = pmap_l1e_to_l2e(l1e, sva); in mmu_radix_remove()
5275 va_next = (sva + L2_PAGE_SIZE) & ~L2_PAGE_MASK; in mmu_radix_remove()
5276 if (va_next < sva) in mmu_radix_remove()
5284 va_next = (sva + L3_PAGE_SIZE) & ~L3_PAGE_MASK; in mmu_radix_remove()
5285 if (va_next < sva) in mmu_radix_remove()
5288 l3e = pmap_l2e_to_l3e(l2e, sva); in mmu_radix_remove()
5305 if (sva + L3_PAGE_SIZE == va_next && eva >= va_next) { in mmu_radix_remove()
5306 pmap_remove_l3e(pmap, l3e, sva, &free, &lock); in mmu_radix_remove()
5309 } else if (!pmap_demote_l3e_locked(pmap, l3e, sva, in mmu_radix_remove()
5325 if (pmap_remove_ptes(pmap, sva, va_next, l3e, &free, &lock)) in mmu_radix_remove()
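
mmu_radix_remove() starts by rounding sva up to a page boundary (line 5243) and then short-circuits the common single-page case (lines 5253–5256), skipping the full multi-level walk. The entry checks in isolation:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE 4096UL
    #define PAGE_MASK (PAGE_SIZE - 1)

    /* True when, after rounding, exactly one page is being removed. */
    static bool
    single_page_fast_path(uint64_t sva, uint64_t eva)
    {
        sva = (sva + PAGE_MASK) & ~PAGE_MASK;
        return (sva + PAGE_SIZE == eva);
    }

    int
    main(void)
    {
        printf("%d %d\n",
            single_page_fast_path(0x10000, 0x11000),   /* 1: one page  */
            single_page_fast_path(0x10000, 0x20000));  /* 0: full walk */
        return (0);
    }
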
5714 mmu_radix_unwire(pmap_t pmap, vm_offset_t sva, vm_offset_t eva) in mmu_radix_unwire() argument
5722 CTR4(KTR_PMAP, "%s(%p, %#x, %#x)", __func__, pmap, sva, eva); in mmu_radix_unwire()
5724 for (; sva < eva; sva = va_next) { in mmu_radix_unwire()
5725 l1e = pmap_pml1e(pmap, sva); in mmu_radix_unwire()
5727 va_next = (sva + L1_PAGE_SIZE) & ~L1_PAGE_MASK; in mmu_radix_unwire()
5728 if (va_next < sva) in mmu_radix_unwire()
5732 l2e = pmap_l1e_to_l2e(l1e, sva); in mmu_radix_unwire()
5734 va_next = (sva + L2_PAGE_SIZE) & ~L2_PAGE_MASK; in mmu_radix_unwire()
5735 if (va_next < sva) in mmu_radix_unwire()
5739 va_next = (sva + L3_PAGE_SIZE) & ~L3_PAGE_MASK; in mmu_radix_unwire()
5740 if (va_next < sva) in mmu_radix_unwire()
5742 l3e = pmap_l2e_to_l3e(l2e, sva); in mmu_radix_unwire()
5754 if (sva + L3_PAGE_SIZE == va_next && eva >= va_next) { in mmu_radix_unwire()
5759 } else if (!pmap_demote_l3e(pmap, l3e, sva)) in mmu_radix_unwire()
5764 for (pte = pmap_l3e_to_pte(l3e, sva); sva != va_next; pte++, in mmu_radix_unwire()
5765 sva += PAGE_SIZE) { in mmu_radix_unwire()
5766 MPASS(pte == pmap_pte(pmap, sva)); in mmu_radix_unwire()
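
The matches stop at the per-PTE loop header of mmu_radix_unwire(); the body that follows, sketched here, clears the wired bit and updates the pmap's wired-page count. The flag and helper names (PG_V, PG_W, atomic_clear_long) are assumptions carried over from FreeBSD's amd64-style pmaps, not taken from the matches above:

    for (pte = pmap_l3e_to_pte(l3e, sva); sva != va_next;
        pte++, sva += PAGE_SIZE) {
        MPASS(pte == pmap_pte(pmap, sva));
        if ((be64toh(*pte) & PG_V) == 0)        /* hole in the range */
            continue;
        if ((be64toh(*pte) & PG_W) == 0)        /* caller bookkeeping bug */
            panic("pmap_unwire: pte is missing PG_W");
        atomic_clear_long(pte, htobe64(PG_W));  /* drop the wired bit */
        pmap->pm_stats.wired_count--;
    }
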
6200 pmap_invalidate_cache_range(vm_offset_t sva, vm_offset_t eva) in pmap_invalidate_cache_range() argument
6202 cpu_flush_dcache((void *)sva, eva - sva); in pmap_invalidate_cache_range()
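
pmap_invalidate_cache_range() is a pure interface adapter: the pmap side speaks half-open [sva, eva) ranges, while the machine-dependent primitive wants a base pointer and a byte count. The same shape in stand-alone form, with a printing stand-in for cpu_flush_dcache():

    #include <stdint.h>
    #include <stdio.h>

    static void
    cpu_flush_dcache(void *addr, size_t len)   /* stand-in for the MD hook */
    {
        printf("flush %zu bytes at %p\n", len, addr);
    }

    /* Adapt an [sva, eva) range to the pointer+length primitive. */
    static void
    invalidate_cache_range(uintptr_t sva, uintptr_t eva)
    {
        cpu_flush_dcache((void *)sva, eva - sva);
    }

    int
    main(void)
    {
        char buf[4096];
        invalidate_cache_range((uintptr_t)buf, (uintptr_t)buf + sizeof(buf));
        return (0);
    }
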