Lines matching refs:va. Each entry below shows the source line number, the matching fragment, and the enclosing function; a trailing 'argument' or 'local' marks how va is declared at that reference.

101 static void pmap_pte_walk(pml1_entry_t *l1, vm_offset_t va);
191 vm_offset_t va, uint16_t ap) in radix_tlbie() argument
195 MPASS((va & PAGE_MASK) == 0); in radix_tlbie()
198 rb = va | is | ap; in radix_tlbie()
204 radix_tlbie_fixup(uint32_t pid, vm_offset_t va, int ap) in radix_tlbie_fixup() argument
209 TLBIEL_INVAL_PAGE, 0, 0, va, ap); in radix_tlbie_fixup()
212 TLBIEL_INVAL_PAGE, pid, 0, va, ap); in radix_tlbie_fixup()
216 radix_tlbie_invlpg_user_4k(uint32_t pid, vm_offset_t va) in radix_tlbie_invlpg_user_4k() argument
220 TLBIEL_INVAL_PAGE, pid, 0, va, TLBIE_ACTUAL_PAGE_4K); in radix_tlbie_invlpg_user_4k()
221 radix_tlbie_fixup(pid, va, TLBIE_ACTUAL_PAGE_4K); in radix_tlbie_invlpg_user_4k()
225 radix_tlbie_invlpg_user_2m(uint32_t pid, vm_offset_t va) in radix_tlbie_invlpg_user_2m() argument
229 TLBIEL_INVAL_PAGE, pid, 0, va, TLBIE_ACTUAL_PAGE_2M); in radix_tlbie_invlpg_user_2m()
230 radix_tlbie_fixup(pid, va, TLBIE_ACTUAL_PAGE_2M); in radix_tlbie_invlpg_user_2m()
250 radix_tlbie_invlpg_kernel_4k(vm_offset_t va) in radix_tlbie_invlpg_kernel_4k() argument
254 TLBIEL_INVAL_PAGE, 0, 0, va, TLBIE_ACTUAL_PAGE_4K); in radix_tlbie_invlpg_kernel_4k()
255 radix_tlbie_fixup(0, va, TLBIE_ACTUAL_PAGE_4K); in radix_tlbie_invlpg_kernel_4k()
259 radix_tlbie_invlpg_kernel_2m(vm_offset_t va) in radix_tlbie_invlpg_kernel_2m() argument
263 TLBIEL_INVAL_PAGE, 0, 0, va, TLBIE_ACTUAL_PAGE_2M); in radix_tlbie_invlpg_kernel_2m()
264 radix_tlbie_fixup(0, va, TLBIE_ACTUAL_PAGE_2M); in radix_tlbie_invlpg_kernel_2m()
269 radix_tlbie_invlpg_kernel_1g(vm_offset_t va) in radix_tlbie_invlpg_kernel_1g() argument
273 TLBIEL_INVAL_PAGE, 0, 0, va, TLBIE_ACTUAL_PAGE_1G); in radix_tlbie_invlpg_kernel_1g()
274 radix_tlbie_fixup(0, va, TLBIE_ACTUAL_PAGE_1G); in radix_tlbie_invlpg_kernel_1g()
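
The invalidation helpers above differ only in the PID (0 for the kernel) and the actual-page-size code passed to radix_tlbie_fixup(). A minimal dispatcher sketch over the kernel variants, using only names visible in this listing:

    /* Hypothetical wrapper (not in the source): dispatch on page size. */
    static inline void
    radix_tlbie_invlpg_kernel(vm_offset_t va, int ap)
    {
        switch (ap) {
        case TLBIE_ACTUAL_PAGE_4K:
            radix_tlbie_invlpg_kernel_4k(va);
            break;
        case TLBIE_ACTUAL_PAGE_2M:
            radix_tlbie_invlpg_kernel_2m(va);
            break;
        case TLBIE_ACTUAL_PAGE_1G:
            radix_tlbie_invlpg_kernel_1g(va);
            break;
        default:
            panic("unknown actual page size %d", ap);
        }
    }
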
294 pmap_l3e_pindex(vm_offset_t va) in pmap_l3e_pindex() argument
296 return ((va & PG_FRAME) >> L3_PAGE_SIZE_SHIFT); in pmap_l3e_pindex()
300 pmap_pml3e_index(vm_offset_t va) in pmap_pml3e_index() argument
303 return ((va >> L3_PAGE_SIZE_SHIFT) & RPTE_MASK); in pmap_pml3e_index()
307 pmap_pml2e_index(vm_offset_t va) in pmap_pml2e_index() argument
309 return ((va >> L2_PAGE_SIZE_SHIFT) & RPTE_MASK); in pmap_pml2e_index()
313 pmap_pml1e_index(vm_offset_t va) in pmap_pml1e_index() argument
315 return ((va & PG_FRAME) >> L1_PAGE_SIZE_SHIFT); in pmap_pml1e_index()
320 pmap_pte_index(vm_offset_t va) in pmap_pte_index() argument
323 return ((va >> PAGE_SHIFT) & RPTE_MASK); in pmap_pte_index()
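
pmap_l3e_pindex() yields a tree-wide 2M-page index, while the *_index() helpers each extract one level's table slot. A sketch of decomposing a VA with them (the types and the shift/mask constant values are assumed from context):

    /* Inside some function: each index selects one slot at its level. */
    vm_offset_t va = 0x00007fffdead0000UL;   /* hypothetical user VA */
    uint64_t i1 = pmap_pml1e_index(va);      /* root (L1) slot */
    uint64_t i2 = pmap_pml2e_index(va);      /* L2 slot */
    uint64_t i3 = pmap_pml3e_index(va);      /* L3 slot (2M granule) */
    uint64_t it = pmap_pte_index(va);        /* leaf PTE slot (4K) */
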
328 pmap_l3e_to_pte(pt_entry_t *l3e, vm_offset_t va) in pmap_l3e_to_pte() argument
335 return (&pte[pmap_pte_index(va)]); in pmap_l3e_to_pte()
340 pmap_l2e_to_l3e(pt_entry_t *l2e, vm_offset_t va) in pmap_l2e_to_l3e() argument
347 return (&l3e[pmap_pml3e_index(va)]); in pmap_l2e_to_l3e()
352 pmap_l1e_to_l2e(pt_entry_t *l1e, vm_offset_t va) in pmap_l1e_to_l2e() argument
360 return (&l2e[pmap_pml2e_index(va)]); in pmap_l1e_to_l2e()
364 pmap_pml1e(pmap_t pmap, vm_offset_t va) in pmap_pml1e() argument
367 return (&pmap->pm_pml1[pmap_pml1e_index(va)]); in pmap_pml1e()
371 pmap_pml2e(pmap_t pmap, vm_offset_t va) in pmap_pml2e() argument
375 l1e = pmap_pml1e(pmap, va); in pmap_pml2e()
378 return (pmap_l1e_to_l2e(l1e, va)); in pmap_pml2e()
382 pmap_pml3e(pmap_t pmap, vm_offset_t va) in pmap_pml3e() argument
386 l2e = pmap_pml2e(pmap, va); in pmap_pml3e()
389 return (pmap_l2e_to_l3e(l2e, va)); in pmap_pml3e()
393 pmap_pte(pmap_t pmap, vm_offset_t va) in pmap_pte() argument
397 l3e = pmap_pml3e(pmap, va); in pmap_pte()
400 return (pmap_l3e_to_pte(l3e, va)); in pmap_pte()
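
pmap_pte() composes the chain above: pmap_pml1e -> pmap_l1e_to_l2e -> pmap_l2e_to_l3e -> pmap_l3e_to_pte. A sketch of the same descent with explicit checks, assuming the conventional PG_V valid bit and the RPTE_LEAF flag that appears later in this listing (entries are stored big-endian, hence be64toh):

    /* Hypothetical walk mirroring pmap_pte(), stopping at a 2M leaf. */
    static pt_entry_t *
    walk_to_pte(pmap_t pmap, vm_offset_t va)
    {
        pt_entry_t *l3e;

        l3e = pmap_pml3e(pmap, va);           /* L1 -> L2 -> L3 */
        if (l3e == NULL || (be64toh(*l3e) & PG_V) == 0)
            return (NULL);                    /* nothing mapped here */
        if (be64toh(*l3e) & RPTE_LEAF)        /* 2M superpage: no PTE level */
            return (l3e);
        return (pmap_l3e_to_pte(l3e, va));    /* 4K PTE in the page table */
    }
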
437 vm_paddr_t mmu_radix_extract(pmap_t pmap, vm_offset_t va);
465 void mmu_radix_sync_icache(pmap_t pm, vm_offset_t va, vm_size_t sz);
494 static void mmu_radix_dumpsys_map(vm_paddr_t pa, size_t sz, void **va);
566 static bool pmap_demote_l3e_locked(pmap_t pmap, pml3_entry_t *l3e, vm_offset_t va,
568 static bool pmap_demote_l3e(pmap_t pmap, pml3_entry_t *pde, vm_offset_t va);
574 static vm_page_t pmap_remove_pt_page(pmap_t pmap, vm_offset_t va);
575 static bool pmap_remove_page(pmap_t pmap, vm_offset_t va, pml3_entry_t *pde,
580 static bool pmap_pv_insert_l3e(pmap_t pmap, vm_offset_t va, pml3_entry_t l3e,
583 static void pmap_pv_promote_l3e(pmap_t pmap, vm_offset_t va, vm_paddr_t pa,
586 static void pmap_pvh_free(struct md_page *pvh, pmap_t pmap, vm_offset_t va);
588 static vm_page_t mmu_radix_enter_quick_locked(pmap_t pmap, vm_offset_t va, vm_page_t m,
591 static bool pmap_enter_2mpage(pmap_t pmap, vm_offset_t va, vm_page_t m,
593 static int pmap_enter_l3e(pmap_t pmap, vm_offset_t va, pml3_entry_t newpde,
599 static vm_page_t pmap_allocl3e(pmap_t pmap, vm_offset_t va,
601 static vm_page_t pmap_allocpte(pmap_t pmap, vm_offset_t va,
603 static void _pmap_unwire_ptp(pmap_t pmap, vm_offset_t va, vm_page_t m,
605 static bool pmap_unwire_ptp(pmap_t pmap, vm_offset_t va, vm_page_t m, struct spglist *free);
609 static int pmap_change_attr_locked(vm_offset_t va, vm_size_t size, int mode, bool flush);
887 pagezero(vm_offset_t va) in pagezero() argument
889 va = trunc_page(va); in pagezero()
891 bzero((void *)va, PAGE_SIZE); in pagezero()
906 kvtopte(vm_offset_t va) in kvtopte() argument
910 l3e = pmap_pml3e(kernel_pmap, va); in kvtopte()
913 return (pmap_l3e_to_pte(l3e, va)); in kvtopte()
917 mmu_radix_kenter(vm_offset_t va, vm_paddr_t pa) in mmu_radix_kenter() argument
921 pte = kvtopte(va); in mmu_radix_kenter()
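
kvtopte() resolves a kernel VA to its PTE via the kernel_pmap L3 walk, and mmu_radix_kenter() uses it to install a mapping. A usage sketch, assuming PG_FRAME masks the frame bits as elsewhere in the file:

    /* Map one physical page at a kernel VA, then sanity-check the PTE. */
    mmu_radix_kenter(va, pa);
    pt_entry_t *pte = kvtopte(va);            /* L3 walk to the PTE */
    MPASS((be64toh(*pte) & PG_FRAME) == (pa & PG_FRAME));
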
934 pmap_nofault_pte(pmap_t pmap, vm_offset_t va, int *is_l3e) in pmap_nofault_pte() argument
939 va &= PG_PS_FRAME; in pmap_nofault_pte()
940 l3e = pmap_pml3e(pmap, va); in pmap_nofault_pte()
949 va &= PG_FRAME; in pmap_nofault_pte()
950 pte = pmap_l3e_to_pte(l3e, va); in pmap_nofault_pte()
957 pmap_nofault(pmap_t pmap, vm_offset_t va, vm_prot_t flags) in pmap_nofault() argument
966 if ((pte = pmap_nofault_pte(pmap, va, &is_l3e)) == NULL) in pmap_nofault()
977 __func__, pmap, va, flags, origpte); in pmap_nofault()
984 printf("%s(%p, %#lx, %#x) (%#lx)\n", __func__, pmap, va, in pmap_nofault()
988 if ((pte = pmap_nofault_pte(pmap, va, &is_l3e)) == NULL || in pmap_nofault()
1140 pmap_invalidate_l3e_page(pmap_t pmap, vm_offset_t va, pml3_entry_t l3e) in pmap_invalidate_l3e_page() argument
1156 pmap_invalidate_range(pmap, va, va + L3_PAGE_SIZE - 1); in pmap_invalidate_l3e_page()
1158 pmap_invalidate_page_2m(pmap, va); in pmap_invalidate_l3e_page()
1258 pmap_pvh_remove(struct md_page *pvh, pmap_t pmap, vm_offset_t va) in pmap_pvh_remove() argument
1271 if (pmap == PV_PMAP(pv) && va == pv->pv_va) { in pmap_pvh_remove()
1286 pmap_pv_demote_l3e(pmap_t pmap, vm_offset_t va, vm_paddr_t pa, in pmap_pv_demote_l3e() argument
1307 va = trunc_2mpage(va); in pmap_pv_demote_l3e()
1308 pv = pmap_pvh_remove(pvh, pmap, va); in pmap_pv_demote_l3e()
1316 va_last = va + L3_PAGE_SIZE - PAGE_SIZE; in pmap_pv_demote_l3e()
1326 va += PAGE_SIZE; in pmap_pv_demote_l3e()
1327 pv->pv_va = va; in pmap_pv_demote_l3e()
1334 if (va == va_last) in pmap_pv_demote_l3e()
1383 vm_offset_t va; in reclaim_pv_chunk() local
1456 va = pv->pv_va; in reclaim_pv_chunk()
1457 l3e = pmap_pml3e(pmap, va); in reclaim_pv_chunk()
1460 pte = pmap_l3e_to_pte(l3e, va); in reclaim_pv_chunk()
1482 pmap_unuse_pt(pmap, va, be64toh(*l3e), &free); in reclaim_pv_chunk()
1675 pmap_pv_promote_l3e(pmap_t pmap, vm_offset_t va, vm_paddr_t pa, in pmap_pv_promote_l3e() argument
1695 va = trunc_2mpage(va); in pmap_pv_promote_l3e()
1696 pv = pmap_pvh_remove(&m->md, pmap, va); in pmap_pv_promote_l3e()
1702 va_last = va + L3_PAGE_SIZE - PAGE_SIZE; in pmap_pv_promote_l3e()
1705 va += PAGE_SIZE; in pmap_pv_promote_l3e()
1706 pmap_pvh_free(&m->md, pmap, va); in pmap_pv_promote_l3e()
1707 } while (va < va_last); in pmap_pv_promote_l3e()
1717 pmap_pvh_free(struct md_page *pvh, pmap_t pmap, vm_offset_t va) in pmap_pvh_free() argument
1721 pv = pmap_pvh_remove(pvh, pmap, va); in pmap_pvh_free()
1731 pmap_try_insert_pv_entry(pmap_t pmap, vm_offset_t va, vm_page_t m, in pmap_try_insert_pv_entry() argument
1739 pv->pv_va = va; in pmap_try_insert_pv_entry()
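
pmap_pv_demote_l3e() and pmap_pv_promote_l3e() convert between one PV entry per 2M mapping and one per 4K page: with L3_PAGE_SIZE / PAGE_SIZE = 512 constituent pages, demotion re-homes the existing entry to the first page and allocates 511 more, and promotion frees those same 511. Both iterate the same bounds:

    /* Shared loop shape (from the lines above):
     *   va      = trunc_2mpage(va);                  first 4K page
     *   va_last = va + L3_PAGE_SIZE - PAGE_SIZE;     last 4K page
     *   do { va += PAGE_SIZE; ... } while (va < va_last);  511 iterations
     */
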
2057 vm_offset_t va; in mmu_radix_late_bootstrap() local
2084 va = virtual_avail + KSTACK_GUARD_PAGES * PAGE_SIZE; in mmu_radix_late_bootstrap()
2085 virtual_avail = va + kstack_pages * PAGE_SIZE; in mmu_radix_late_bootstrap()
2086 CTR2(KTR_PMAP, "moea64_bootstrap: kstack0 at %#x (%#x)", pa, va); in mmu_radix_late_bootstrap()
2087 thread0.td_kstack = va; in mmu_radix_late_bootstrap()
2089 mmu_radix_kenter(va, pa); in mmu_radix_late_bootstrap()
2091 va += PAGE_SIZE; in mmu_radix_late_bootstrap()
2226 vm_offset_t va, va_next; in mmu_radix_advise() local
2280 va = eva; in mmu_radix_advise()
2281 if (va > va_next) in mmu_radix_advise()
2282 va = va_next; in mmu_radix_advise()
2283 va -= PAGE_SIZE; in mmu_radix_advise()
2284 KASSERT(va >= sva, in mmu_radix_advise()
2286 pte = pmap_l3e_to_pte(l3e, va); in mmu_radix_advise()
2289 pmap_remove_pte(pmap, pte, va, be64toh(*l3e), NULL, in mmu_radix_advise()
2298 va = va_next; in mmu_radix_advise()
2323 if (va != va_next) { in mmu_radix_advise()
2325 va = va_next; in mmu_radix_advise()
2328 if (va != va_next) in mmu_radix_advise()
2433 vm_offset_t va; in mmu_radix_clear_modify() local
2465 va = pv->pv_va; in mmu_radix_clear_modify()
2466 l3e = pmap_pml3e(pmap, va); in mmu_radix_clear_modify()
2469 pmap_demote_l3e_locked(pmap, l3e, va, &lock) && in mmu_radix_clear_modify()
2476 va += VM_PAGE_TO_PHYS(m) - (oldl3e & in mmu_radix_clear_modify()
2478 pte = pmap_l3e_to_pte(l3e, va); in mmu_radix_clear_modify()
2485 pmap_invalidate_page(pmap, va); in mmu_radix_clear_modify()
2723 pmap_promote_l3e(pmap_t pmap, pml3_entry_t *pde, vm_offset_t va, in pmap_promote_l3e() argument
2742 " in pmap %p", va, pmap); in pmap_promote_l3e()
2766 " in pmap %p", va, pmap); in pmap_promote_l3e()
2779 (va & ~L3_PAGE_MASK), pmap); in pmap_promote_l3e()
2783 " in pmap %p", va, pmap); in pmap_promote_l3e()
2798 KASSERT(mpte->pindex == pmap_l3e_pindex(va), in pmap_promote_l3e()
2802 "pmap_promote_l3e: failure for va %#lx in pmap %p", va, in pmap_promote_l3e()
2811 pmap_pv_promote_l3e(pmap, va, newpde & PG_PS_FRAME, lockp); in pmap_promote_l3e()
2817 " in pmap %p", va, pmap); in pmap_promote_l3e()
2826 mmu_radix_enter(pmap_t pmap, vm_offset_t va, vm_page_t m, in mmu_radix_enter() argument
2839 va = trunc_page(va); in mmu_radix_enter()
2842 CTR6(KTR_PMAP, "pmap_enter(%p, %#lx, %p, %#x, %#x, %d)", pmap, va, in mmu_radix_enter()
2844 KASSERT(va <= VM_MAX_KERNEL_ADDRESS, ("pmap_enter: toobig")); in mmu_radix_enter()
2845 KASSERT((m->oflags & VPO_UNMANAGED) != 0 || !VA_IS_CLEANMAP(va), in mmu_radix_enter()
2869 if (va >= DMAP_MIN_ADDRESS) in mmu_radix_enter()
2887 KASSERT((va & L3_PAGE_MASK) == 0, ("pmap_enter: va unaligned")); in mmu_radix_enter()
2889 rv = pmap_enter_l3e(pmap, va, newpte | RPTE_LEAF, flags, m, &lock); in mmu_radix_enter()
2899 l3e = pmap_pml3e(pmap, va); in mmu_radix_enter()
2901 pmap_demote_l3e_locked(pmap, l3e, va, &lock))) { in mmu_radix_enter()
2902 pte = pmap_l3e_to_pte(l3e, va); in mmu_radix_enter()
2903 if (va < VM_MAXUSER_ADDRESS && mpte == NULL) { in mmu_radix_enter()
2907 } else if (va < VM_MAXUSER_ADDRESS) { in mmu_radix_enter()
2913 mpte = _pmap_allocpte(pmap, pmap_l3e_pindex(va), in mmu_radix_enter()
2924 panic("pmap_enter: invalid page directory va=%#lx", va); in mmu_radix_enter()
2937 pmap, va, m, prot, flags, psind, pmap->pm_pid, in mmu_radix_enter()
2940 pmap_pte_walk(pmap->pm_pml1, va); in mmu_radix_enter()
2962 " va: 0x%lx", va)); in mmu_radix_enter()
3005 ("pmap_enter: unexpected pa update for %#lx", va)); in mmu_radix_enter()
3019 pv = pmap_pvh_remove(&om->md, pmap, va); in mmu_radix_enter()
3046 pmap, va, m, prot, flags, psind, in mmu_radix_enter()
3066 pv->pv_va = va; in mmu_radix_enter()
3087 ("pmap_enter: unexpected pa update for %#lx", va)); in mmu_radix_enter()
3123 pmap_promote_l3e(pmap, l3e, va, &lock) == 0) in mmu_radix_enter()
3129 pmap_invalidate_page(pmap, va); in mmu_radix_enter()
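
mmu_radix_enter() implements the MI pmap_enter(9) entry point. A typical managed 4K insertion from the machine-independent side might look like this (flags per pmap_enter(9): an access type plus optional PMAP_ENTER_* bits):

    /* Install one wired, writable 4K mapping for vm_page m at va. */
    int error = pmap_enter(pmap, va, m,
        VM_PROT_READ | VM_PROT_WRITE,        /* prot */
        VM_PROT_WRITE | PMAP_ENTER_WIRED,    /* flags: access type + wired */
        0);                                  /* psind: 0 = base page */
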
3145 pmap_abort_ptp(pmap_t pmap, vm_offset_t va, vm_page_t pdpg) in pmap_abort_ptp() argument
3150 if (pmap_unwire_ptp(pmap, va, pdpg, &free)) { in pmap_abort_ptp()
3157 pmap_invalidate_page(pmap, va); in pmap_abort_ptp()
3170 pmap_enter_2mpage(pmap_t pmap, vm_offset_t va, vm_page_t m, vm_prot_t prot, in pmap_enter_2mpage() argument
3184 if (va >= DMAP_MIN_ADDRESS) in pmap_enter_2mpage()
3186 return (pmap_enter_l3e(pmap, va, newpde, PMAP_ENTER_NOSLEEP | in pmap_enter_2mpage()
3203 pmap_enter_l3e(pmap_t pmap, vm_offset_t va, pml3_entry_t newpde, u_int flags, in pmap_enter_l3e() argument
3215 if ((pdpg = pmap_allocl3e(pmap, va, (flags & PMAP_ENTER_NOSLEEP) != 0 ? in pmap_enter_l3e()
3218 " in pmap %p", va, pmap); in pmap_enter_l3e()
3222 l3e = &l3e[pmap_pml3e_index(va)]; in pmap_enter_l3e()
3230 " in pmap %p", va, pmap); in pmap_enter_l3e()
3242 (void)pmap_remove_l3e(pmap, l3e, va, &free, lockp); in pmap_enter_l3e()
3243 pmap_invalidate_l3e_page(pmap, va, oldl3e); in pmap_enter_l3e()
3245 if (pmap_remove_ptes(pmap, va, va + L3_PAGE_SIZE, l3e, in pmap_enter_l3e()
3250 if (va >= VM_MAXUSER_ADDRESS) { in pmap_enter_l3e()
3272 pmap_abort_ptp(pmap, va, pdpg); in pmap_enter_l3e()
3275 uwptpg->pindex = pmap_l3e_pindex(va); in pmap_enter_l3e()
3279 pmap_abort_ptp(pmap, va, pdpg); in pmap_enter_l3e()
3291 if (!pmap_pv_insert_l3e(pmap, va, newpde, flags, lockp)) { in pmap_enter_l3e()
3292 pmap_abort_ptp(pmap, va, pdpg); in pmap_enter_l3e()
3294 mt = pmap_remove_pt_page(pmap, va); in pmap_enter_l3e()
3304 " in pmap %p", va, pmap); in pmap_enter_l3e()
3329 " in pmap %p", va, pmap); in pmap_enter_l3e()
3339 vm_offset_t va; in mmu_radix_enter_object() local
3355 va = start + ptoa(m->pindex - m_start->pindex); in mmu_radix_enter_object()
3356 if ((va & L3_PAGE_MASK) == 0 && va + L3_PAGE_SIZE <= end && in mmu_radix_enter_object()
3358 pmap_enter_2mpage(pmap, va, m, prot, &lock)) { in mmu_radix_enter_object()
3361 mpte = mmu_radix_enter_quick_locked(pmap, va, m, prot, in mmu_radix_enter_object()
3375 mmu_radix_enter_quick_locked(pmap_t pmap, vm_offset_t va, vm_page_t m, in mmu_radix_enter_quick_locked() argument
3382 KASSERT(!VA_IS_CLEANMAP(va) || in mmu_radix_enter_quick_locked()
3391 if (va < VM_MAXUSER_ADDRESS) { in mmu_radix_enter_quick_locked()
3398 ptepindex = pmap_l3e_pindex(va); in mmu_radix_enter_quick_locked()
3405 ptepa = pmap_pml3e(pmap, va); in mmu_radix_enter_quick_locked()
3429 pte = &pte[pmap_pte_index(va)]; in mmu_radix_enter_quick_locked()
3432 pte = pmap_pte(pmap, va); in mmu_radix_enter_quick_locked()
3446 !pmap_try_insert_pv_entry(pmap, va, m, lockp)) { in mmu_radix_enter_quick_locked()
3449 if (pmap_unwire_ptp(pmap, va, mpte, &free)) { in mmu_radix_enter_quick_locked()
3482 mmu_radix_enter_quick(pmap_t pmap, vm_offset_t va, vm_page_t m, in mmu_radix_enter_quick() argument
3491 mmu_radix_enter_quick_locked(pmap, va, m, prot, NULL, &lock, in mmu_radix_enter_quick()
3502 mmu_radix_extract(pmap_t pmap, vm_offset_t va) in mmu_radix_extract() argument
3508 l3e = pmap_pml3e(pmap, va); in mmu_radix_extract()
3512 pa = (be64toh(*l3e) & PG_PS_FRAME) | (va & L3_PAGE_MASK); in mmu_radix_extract()
3513 pa |= (va & L3_PAGE_MASK); in mmu_radix_extract()
3523 pte = pmap_l3e_to_pte(l3e, va); in mmu_radix_extract()
3527 pa = (pa & PG_FRAME) | (va & PAGE_MASK); in mmu_radix_extract()
3528 pa |= (va & PAGE_MASK); in mmu_radix_extract()
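
For a 2M leaf, the physical address splices the frame bits of the L3 entry with the low bits of the VA. A worked sketch, assuming L3_PAGE_MASK covers the low 21 bits of a 2M page:

    /* l3e frame 0x80200000, va offset 0x1f3456 within the 2M page:
     * pa = 0x80200000 | 0x1f3456 = 0x803f3456. */
    pa = (be64toh(*l3e) & PG_PS_FRAME) | (va & L3_PAGE_MASK);
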
3534 mmu_radix_extract_and_hold(pmap_t pmap, vm_offset_t va, vm_prot_t prot) in mmu_radix_extract_and_hold() argument
3541 CTR4(KTR_PMAP, "%s(%p, %#x, %#x)", __func__, pmap, va, prot); in mmu_radix_extract_and_hold()
3543 l3ep = pmap_pml3e(pmap, va); in mmu_radix_extract_and_hold()
3548 (va & L3_PAGE_MASK)); in mmu_radix_extract_and_hold()
3551 pte = be64toh(*pmap_l3e_to_pte(l3ep, va)); in mmu_radix_extract_and_hold()
4410 pmap_allocl3e(pmap_t pmap, vm_offset_t va, struct rwlock **lockp) in pmap_allocl3e() argument
4417 pdpe = pmap_pml2e(pmap, va); in pmap_allocl3e()
4424 ptepindex = pmap_l3e_pindex(va); in pmap_allocl3e()
4434 pmap_allocpte(pmap_t pmap, vm_offset_t va, struct rwlock **lockp) in pmap_allocpte() argument
4443 ptepindex = pmap_l3e_pindex(va); in pmap_allocpte()
4448 pd = pmap_pml3e(pmap, va); in pmap_allocpte()
4455 if (!pmap_demote_l3e_locked(pmap, pd, va, lockp)) { in pmap_allocpte()
4505 vm_offset_t eva, va; in pmap_protect_l3e() local
4518 for (va = sva, m = PHYS_TO_VM_PAGE(oldpde & PG_PS_FRAME); in pmap_protect_l3e()
4519 va < eva; va += PAGE_SIZE, m++) in pmap_protect_l3e()
4676 vm_offset_t va; in mmu_radix_qenter() local
4680 va = sva; in mmu_radix_qenter()
4681 pte = kvtopte(va); in mmu_radix_qenter()
4682 while (va < sva + PAGE_SIZE * count) { in mmu_radix_qenter()
4683 if (__predict_false((va & L3_PAGE_MASK) == 0)) in mmu_radix_qenter()
4684 pte = kvtopte(va); in mmu_radix_qenter()
4685 MPASS(pte == pmap_pte(kernel_pmap, va)); in mmu_radix_qenter()
4700 va += PAGE_SIZE; in mmu_radix_qenter()
4713 vm_offset_t va; in mmu_radix_qremove() local
4719 va = sva; in mmu_radix_qremove()
4720 pte = kvtopte(va); in mmu_radix_qremove()
4721 while (va < sva + PAGE_SIZE * count) { in mmu_radix_qremove()
4722 if (__predict_false((va & L3_PAGE_MASK) == 0)) in mmu_radix_qremove()
4723 pte = kvtopte(va); in mmu_radix_qremove()
4726 va += PAGE_SIZE; in mmu_radix_qremove()
4728 pmap_invalidate_range(kernel_pmap, sva, va); in mmu_radix_qremove()
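
mmu_radix_qenter()/mmu_radix_qremove() map and unmap a run of pages at known KVA; qremove batches the shootdown into one ranged invalidation (line 4728). A usage sketch, assuming the customary (sva, page array, count) qenter signature:

    /* Temporarily map 'count' pages at sva, use them, then unmap. */
    mmu_radix_qenter(sva, ma, count);     /* ma: vm_page_t array (assumed) */
    /* ... access the pages through sva ... */
    mmu_radix_qremove(sva, count);
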
4771 pmap_remove_pt_page(pmap_t pmap, vm_offset_t va) in pmap_remove_pt_page() argument
4775 return (vm_radix_remove(&pmap->pm_radix, pmap_l3e_pindex(va))); in pmap_remove_pt_page()
4785 pmap_unwire_ptp(pmap_t pmap, vm_offset_t va, vm_page_t m, struct spglist *free) in pmap_unwire_ptp() argument
4790 _pmap_unwire_ptp(pmap, va, m, free); in pmap_unwire_ptp()
4797 _pmap_unwire_ptp(pmap_t pmap, vm_offset_t va, vm_page_t m, struct spglist *free) in _pmap_unwire_ptp() argument
4807 pml1 = pmap_pml1e(pmap, va); in _pmap_unwire_ptp()
4812 l2e = pmap_pml2e(pmap, va); in _pmap_unwire_ptp()
4817 l3e = pmap_pml3e(pmap, va); in _pmap_unwire_ptp()
4825 pdpg = PHYS_TO_VM_PAGE(be64toh(*pmap_pml2e(pmap, va)) & PG_FRAME); in _pmap_unwire_ptp()
4826 pmap_unwire_ptp(pmap, va, pdpg, free); in _pmap_unwire_ptp()
4832 pdppg = PHYS_TO_VM_PAGE(be64toh(*pmap_pml1e(pmap, va)) & PG_FRAME); in _pmap_unwire_ptp()
4833 pmap_unwire_ptp(pmap, va, pdppg, free); in _pmap_unwire_ptp()
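
_pmap_unwire_ptp() cascades upward when a page-table page's last reference drops; the calls at lines 4826 and 4833 recurse to the L2- and L1-level pages. In outline (presumably the root pm_pml1 itself is never released this way):

    /* Cascade sketch (comment only):
     *   free PTE page -> clear *l3e, unwire the L2-level page holding it
     *   L2 page empty -> clear *l2e, unwire the L1-level page holding it
     *   the L1 root table (pm_pml1) stays for the pmap's lifetime
     */
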
4848 pmap_unuse_pt(pmap_t pmap, vm_offset_t va, pml3_entry_t ptepde, in pmap_unuse_pt() argument
4853 if (va >= VM_MAXUSER_ADDRESS) in pmap_unuse_pt()
4857 return (pmap_unwire_ptp(pmap, va, mpte, free)); in pmap_unuse_pt()
4883 pmap_pv_insert_l3e(pmap_t pmap, vm_offset_t va, pml3_entry_t pde, u_int flags, in pmap_pv_insert_l3e() argument
4895 pv->pv_va = va; in pmap_pv_insert_l3e()
4919 pmap_demote_l3e(pmap_t pmap, pml3_entry_t *pde, vm_offset_t va) in pmap_demote_l3e() argument
4925 rv = pmap_demote_l3e_locked(pmap, pde, va, &lock); in pmap_demote_l3e()
4932 pmap_demote_l3e_locked(pmap_t pmap, pml3_entry_t *l3e, vm_offset_t va, in pmap_demote_l3e_locked() argument
4947 if ((oldpde & PG_A) == 0 || (mpte = pmap_remove_pt_page(pmap, va)) == in pmap_demote_l3e_locked()
4966 (va >= DMAP_MIN_ADDRESS && va < DMAP_MAX_ADDRESS ? in pmap_demote_l3e_locked()
4969 sva = trunc_2mpage(va); in pmap_demote_l3e_locked()
4974 " in pmap %p", va, pmap); in pmap_demote_l3e_locked()
4977 mpte->pindex = pmap_l3e_pindex(va); in pmap_demote_l3e_locked()
4978 if (va < VM_MAXUSER_ADDRESS) in pmap_demote_l3e_locked()
5026 pmap_invalidate_l3e_page(pmap, trunc_2mpage(va), oldpde); in pmap_demote_l3e_locked()
5031 pmap_pv_demote_l3e(pmap, va, oldpde & PG_PS_FRAME, lockp); in pmap_demote_l3e_locked()
5035 " in pmap %p", va, pmap); in pmap_demote_l3e_locked()
5043 pmap_remove_kernel_l3e(pmap_t pmap, pml3_entry_t *l3e, vm_offset_t va) in pmap_remove_kernel_l3e() argument
5050 mpte = pmap_remove_pt_page(pmap, va); in pmap_remove_kernel_l3e()
5077 vm_offset_t eva, va; in pmap_remove_l3e() local
5092 for (va = sva, m = PHYS_TO_VM_PAGE(oldpde & PG_PS_FRAME); in pmap_remove_l3e()
5093 va < eva; va += PAGE_SIZE, m++) { in pmap_remove_l3e()
5122 pmap_remove_pte(pmap_t pmap, pt_entry_t *ptq, vm_offset_t va, in pmap_remove_pte() argument
5141 pmap_pvh_free(&m->md, pmap, va); in pmap_remove_pte()
5149 return (pmap_unuse_pt(pmap, va, ptepde, free)); in pmap_remove_pte()
5156 pmap_remove_page(pmap_t pmap, vm_offset_t va, pml3_entry_t *l3e, in pmap_remove_page() argument
5167 pte = pmap_l3e_to_pte(l3e, va); in pmap_remove_page()
5173 invalidate_all = pmap_remove_pte(pmap, pte, va, be64toh(*l3e), free, &lock); in pmap_remove_page()
5177 pmap_invalidate_page(pmap, va); in pmap_remove_page()
5189 vm_offset_t va; in pmap_remove_ptes() local
5194 va = eva; in pmap_remove_ptes()
5199 if (va != eva) { in pmap_remove_ptes()
5201 va = eva; in pmap_remove_ptes()
5205 if (va == eva) in pmap_remove_ptes()
5206 va = sva; in pmap_remove_ptes()
5215 else if (va != eva) in pmap_remove_ptes()
5216 pmap_invalidate_range(pmap, va, sva); in pmap_remove_ptes()
5346 vm_offset_t va; in mmu_radix_remove_all() local
5372 va = pv->pv_va; in mmu_radix_remove_all()
5373 l3e = pmap_pml3e(pmap, va); in mmu_radix_remove_all()
5374 (void)pmap_demote_l3e_locked(pmap, l3e, va, &lock); in mmu_radix_remove_all()
5899 vm_offset_t va, tmpva, ppa, offset; in mmu_radix_mapdev_attr() local
5907 va = kva_alloc(size); in mmu_radix_mapdev_attr()
5912 if (!va) in mmu_radix_mapdev_attr()
5915 for (tmpva = va; size > 0;) { in mmu_radix_mapdev_attr()
5923 return ((void *)(va + offset)); in mmu_radix_mapdev_attr()
5960 vm_offset_t offset, va; in mmu_radix_unmapdev() local
5965 va = (vm_offset_t)p; in mmu_radix_unmapdev()
5966 if (va >= DMAP_MIN_ADDRESS && va < DMAP_MAX_ADDRESS) in mmu_radix_unmapdev()
5969 offset = va & PAGE_MASK; in mmu_radix_unmapdev()
5971 va = trunc_page(va); in mmu_radix_unmapdev()
5974 mmu_radix_qremove(va, atop(size)); in mmu_radix_unmapdev()
5975 kva_free(va, size); in mmu_radix_unmapdev()
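
A sketch of the mapdev/unmapdev pair in use ('bar_pa' and 'bar_len' are hypothetical):

    /* Map a device region uncacheable, use it, then release the KVA. */
    void *regs = mmu_radix_mapdev_attr(bar_pa, bar_len,
        VM_MEMATTR_UNCACHEABLE);
    /* ... device access through regs ... */
    mmu_radix_unmapdev(regs, bar_len);
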
5980 mmu_radix_sync_icache(pmap_t pm, vm_offset_t va, vm_size_t sz) in mmu_radix_sync_icache() argument
5989 pa = pmap_extract(pm, va); in mmu_radix_sync_icache()
5990 sync_sz = PAGE_SIZE - (va & PAGE_MASK); in mmu_radix_sync_icache()
5993 pa += (va & PAGE_MASK); in mmu_radix_sync_icache()
5996 va += sync_sz; in mmu_radix_sync_icache()
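
The loop in mmu_radix_sync_icache() clips each step at a page boundary and re-resolves pa per page, since adjacent VAs need not be physically contiguous. A worked example with hypothetical numbers:

    /* PAGE_SIZE = 4K, va = 0x1000f80, sz = 0x100:
     *   pass 1: sync_sz = 0x1000 - 0xf80 = 0x80 bytes, to the page end
     *   pass 2: va = 0x1001000, remaining 0x80 bytes, pa looked up anew
     */
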
6021 pmap_demote_l2e(pmap_t pmap, pml2_entry_t *l2e, vm_offset_t va) in pmap_demote_l2e() argument
6035 " in pmap %p", va, pmap); in pmap_demote_l2e()
6038 pdpg->pindex = va >> L2_PAGE_SIZE_SHIFT; in pmap_demote_l2e()
6067 " in pmap %p", va, pmap); in pmap_demote_l2e()
6072 mmu_radix_kextract(vm_offset_t va) in mmu_radix_kextract() argument
6077 CTR2(KTR_PMAP, "%s(%#x)", __func__, va); in mmu_radix_kextract()
6078 if (va >= DMAP_MIN_ADDRESS && va < DMAP_MAX_ADDRESS) { in mmu_radix_kextract()
6079 pa = DMAP_TO_PHYS(va); in mmu_radix_kextract()
6082 l3e = *pmap_pml3e(kernel_pmap, va); in mmu_radix_kextract()
6084 pa = (be64toh(l3e) & PG_PS_FRAME) | (va & L3_PAGE_MASK); in mmu_radix_kextract()
6085 pa |= (va & L3_PAGE_MASK); in mmu_radix_kextract()
6095 pa = be64toh(*pmap_l3e_to_pte(&l3e, va)); in mmu_radix_kextract()
6096 pa = (pa & PG_FRAME) | (va & PAGE_MASK); in mmu_radix_kextract()
6097 pa |= (va & PAGE_MASK); in mmu_radix_kextract()
6124 mmu_radix_kenter_attr(vm_offset_t va, vm_paddr_t pa, vm_memattr_t ma) in mmu_radix_kenter_attr() argument
6129 pte = kvtopte(va); in mmu_radix_kenter_attr()
6137 mmu_radix_kremove(vm_offset_t va) in mmu_radix_kremove() argument
6141 CTR2(KTR_PMAP, "%s(%#x)", __func__, va); in mmu_radix_kremove()
6143 pte = kvtopte(va); in mmu_radix_kremove()
6176 void **va) in mmu_radix_dumpsys_map() argument
6178 CTR4(KTR_PMAP, "%s(%#jx, %#zx, %p)", __func__, (uintmax_t)pa, sz, va); in mmu_radix_dumpsys_map()
6206 mmu_radix_change_attr(vm_offset_t va, vm_size_t size, in mmu_radix_change_attr() argument
6211 CTR4(KTR_PMAP, "%s(%#x, %#zx, %d)", __func__, va, size, mode); in mmu_radix_change_attr()
6213 error = pmap_change_attr_locked(va, size, mode, true); in mmu_radix_change_attr()
6219 pmap_change_attr_locked(vm_offset_t va, vm_size_t size, int mode, bool flush) in pmap_change_attr_locked() argument
6230 base = trunc_page(va); in pmap_change_attr_locked()
6231 offset = va & PAGE_MASK; in pmap_change_attr_locked()
6437 vm_offset_t va; in mmu_radix_page_array_startup() local
6454 for (va = start; va < end; va += L3_PAGE_SIZE) { in mmu_radix_page_array_startup()
6455 pfn = first_page + (va - start) / sizeof(struct vm_page); in mmu_radix_page_array_startup()
6457 l2e = pmap_pml2e(kernel_pmap, va); in mmu_radix_page_array_startup()
6464 pde = pmap_l2e_to_l3e(l2e, va); in mmu_radix_page_array_startup()
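
The loop backs the vm_page[] array with 2M leaves, one per L3_PAGE_SIZE of array. Symbolically:

    /* Per-iteration coverage (sizeof(struct vm_page) varies by config):
     *   array bytes mapped: L3_PAGE_SIZE (one 2M leaf)
     *   pages described:    L3_PAGE_SIZE / sizeof(struct vm_page)
     *   pfn for the chunk:  first_page + (va - start) / sizeof(struct vm_page)
     */
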
6482 pmap_pte_walk(pml1_entry_t *l1, vm_offset_t va) in pmap_pte_walk() argument
6489 l1e = &l1[pmap_pml1e_index(va)]; in pmap_pte_walk()
6490 db_printf("VA %#016lx l1e %#016lx", va, be64toh(*l1e)); in pmap_pte_walk()
6495 l2e = pmap_l1e_to_l2e(l1e, va); in pmap_pte_walk()
6501 l3e = pmap_l2e_to_l3e(l2e, va); in pmap_pte_walk()
6507 pte = pmap_l3e_to_pte(l3e, va); in pmap_pte_walk()
6533 vm_offset_t va; in DB_SHOW_COMMAND() local
6540 va = (vm_offset_t)addr; in DB_SHOW_COMMAND()
6542 if (va >= DMAP_MIN_ADDRESS) in DB_SHOW_COMMAND()
6549 pmap_pte_walk(pmap->pm_pml1, va); in DB_SHOW_COMMAND()
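
The ddb command funnels into pmap_pte_walk(); as at the other call sites, a kernel address is walked from kernel_pmap's root table:

    /* Dump each level's entry for a kernel VA (debug context only). */
    pmap_pte_walk(kernel_pmap->pm_pml1, va);
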