Lines Matching +full:5 +full:kd

135  * s(ava) = 5 - n
143 #define AVA_PAGE_SHIFT(b) (5 - (MIN(54, 77-(b)) + 1 - 50))
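
The macro above encodes the arithmetic spelled out in the matched comment (q = MIN(54, 77-b), n = q + 1 - 50, s(ava) = 5 - n). As a quick cross-check, here is a minimal standalone C sketch evaluating the same expression for a few base page sizes; MIN is assumed to be the usual two-argument minimum as in sys/param.h.

#include <stdio.h>

#define MIN(a, b)		((a) < (b) ? (a) : (b))
/* Same expression as the matched macro: s(ava) = 5 - n, n = MIN(54, 77-b) + 1 - 50. */
#define AVA_PAGE_SHIFT(b)	(5 - (MIN(54, 77-(b)) + 1 - 50))

int
main(void)
{
	int b;

	/* b is log2 of the base page size: 12 = 4 KiB, 16 = 64 KiB, 24 = 16 MiB. */
	for (b = 12; b <= 24; b += 4)
		printf("b=%d -> AVA_PAGE_SHIFT=%d\n", b, AVA_PAGE_SHIFT(b));
	return (0);
}

With these inputs the shift works out to 0 for b = 12 through 23 and to 1 for b = 24, i.e. only the largest base page size drops an AVA page bit.
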
183 slb_init(kvm_t *kd) in slb_init() argument
191 hdr = &kd->vmst->hdr; in slb_init()
192 data = PPC64_MMU_DATA(kd); in slb_init()
198 data->slbs = _kvm_malloc(kd, slbsize); in slb_init()
200 _kvm_err(kd, kd->program, "cannot allocate slbs"); in slb_init()
236 ppc64mmu_hpt_cleanup(kvm_t *kd) in ppc64mmu_hpt_cleanup() argument
240 if (kd->vmst == NULL) in ppc64mmu_hpt_cleanup()
243 data = PPC64_MMU_DATA(kd); in ppc64mmu_hpt_cleanup()
246 PPC64_MMU_DATA(kd) = NULL; in ppc64mmu_hpt_cleanup()
250 ppc64mmu_hpt_init(kvm_t *kd) in ppc64mmu_hpt_init() argument
255 data = _kvm_malloc(kd, sizeof(*data)); in ppc64mmu_hpt_init()
257 _kvm_err(kd, kd->program, "cannot allocate MMU data"); in ppc64mmu_hpt_init()
261 PPC64_MMU_DATA(kd) = data; in ppc64mmu_hpt_init()
263 if (slb_init(kd) == -1) in ppc64mmu_hpt_init()
269 ppc64mmu_hpt_cleanup(kd); in ppc64mmu_hpt_init()
274 slb_search(kvm_t *kd, kvaddr_t ea) in slb_search() argument
280 data = PPC64_MMU_DATA(kd); in slb_search()
301 _kvm_err(kd, kd->program, "%s: segment not found for EA 0x%jx", in slb_search()
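
The slb_search() matches above walk the SLB entries cached from the minidump header, looking for the segment that covers a given effective address. A self-contained sketch of that kind of ESID lookup follows; the struct, the SEGMENT_SHIFT value, and the example addresses are hypothetical stand-ins, not the libkvm definitions.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define SEGMENT_SHIFT	28	/* 256 MB segments; stands in for SLBE_ESID_SHIFT */

/* Hypothetical stand-in for the cached SLB entries (not the libkvm layout). */
struct slb_ent {
	uint64_t	esid;	/* effective segment id (EA >> SEGMENT_SHIFT) */
	int		valid;
};

/* Return the entry whose segment covers `ea`, or NULL if none matches. */
static struct slb_ent *
slb_find(struct slb_ent *slbs, size_t n, uint64_t ea)
{
	size_t i;

	for (i = 0; i < n; i++) {
		if (slbs[i].valid && slbs[i].esid == (ea >> SEGMENT_SHIFT))
			return (&slbs[i]);
	}
	return (NULL);
}

int
main(void)
{
	struct slb_ent slbs[] = {
		{ .esid = 0xc000000000000000ULL >> SEGMENT_SHIFT, .valid = 1 },
	};
	uint64_t ea = 0xc000000000001000ULL;	/* a typical ppc64 kernel EA */

	printf("segment %sfound\n", slb_find(slbs, 1, ea) ? "" : "not ");
	return (0);
}
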
309 pte_get(kvm_t *kd, u_long ptex) in pte_get() argument
313 p = _kvm_pmap_get(kd, ptex, sizeof(pte)); in pte_get()
320 pte_search(kvm_t *kd, ppc64_slb_entry_t *slb, uint64_t hid, kvaddr_t ea, in pte_search() argument
365 hmask = kd->vmst->hdr.pmapsize / (8 * sizeof(ppc64_pt_entry_t)) - 1; in pte_search()
379 pte = pte_get(kd, ptex); in pte_search()
416 return (pte_search(kd, slb, LPTEH_HID, ea, p)); in pte_search()
418 _kvm_err(kd, kd->program, in pte_search()
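
The pte_search() matches show the two ingredients of the hashed-page-table probe: a mask derived from pmapsize (eight PTE slots per group) and a secondary probe selected by the HID bit. A rough standalone sketch of that index arithmetic follows; the 39-bit hash mask, the 16-byte PTE size, and the example values are assumptions for illustration, not taken from the matched file.

#include <stdint.h>
#include <stdio.h>

#define PTE_HASH_MASK	0x0000007fffffffffUL	/* assumed 39-bit hash mask */
#define PTES_PER_GROUP	8
#define PTE_SIZE	16			/* assumed: two 64-bit words per PTE */

/*
 * Hypothetical helper: compute the starting PTE index (ptex) of the PTE
 * group for a VSID and virtual page number.  `secondary` mirrors the HID
 * case in pte_search(): the secondary hash is the complement of the
 * primary.  `htsize` plays the role pmapsize plays above.
 */
static uint64_t
pteg_index(uint64_t vsid, uint64_t vpn, int secondary, uint64_t htsize)
{
	uint64_t hash, hmask;

	hmask = htsize / (PTES_PER_GROUP * PTE_SIZE) - 1;
	hash = (vsid & PTE_HASH_MASK) ^ vpn;
	if (secondary)
		hash = ~hash & PTE_HASH_MASK;
	return ((hash & hmask) * PTES_PER_GROUP);
}

int
main(void)
{
	/* Hypothetical values: a 256 KB hash table, arbitrary VSID and page. */
	printf("ptex = %llu\n",
	    (unsigned long long)pteg_index(0x123456789abUL, 0x42, 0, 256 * 1024));
	return (0);
}
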
430 pte_lookup(kvm_t *kd, kvaddr_t ea, ppc64_pt_entry_t *pte) in pte_lookup() argument
435 if ((slb = slb_search(kd, ea)) == NULL) in pte_lookup()
439 return (pte_search(kd, slb, 0, ea, pte)); in pte_lookup()
443 ppc64mmu_hpt_kvatop(kvm_t *kd, kvaddr_t va, off_t *pa) in ppc64mmu_hpt_kvatop() argument
452 vm = kd->vmst; in ppc64mmu_hpt_kvatop()
473 ptoff = _kvm_pt_find(kd, pgpa, PPC64_PAGE_SIZE); in ppc64mmu_hpt_kvatop()
475 _kvm_err(kd, kd->program, "%s: " in ppc64mmu_hpt_kvatop()
484 if ((err = pte_lookup(kd, va, &pte)) == -1) { in ppc64mmu_hpt_kvatop()
485 _kvm_err(kd, kd->program, in ppc64mmu_hpt_kvatop()
497 ptoff = _kvm_pt_find(kd, pgpa, PPC64_PAGE_SIZE); in ppc64mmu_hpt_kvatop()
499 _kvm_err(kd, kd->program, "%s: " in ppc64mmu_hpt_kvatop()
507 _kvm_err(kd, kd->program, in ppc64mmu_hpt_kvatop()
514 _kvm_err(kd, 0, "invalid address (0x%jx)", (uintmax_t)va); in ppc64mmu_hpt_kvatop()
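
ppc64mmu_hpt_kvatop() is the internal hook libkvm uses to turn a kernel virtual address into an offset within the minidump, and it is what ultimately services reads of a dead kernel. From the consumer side this machinery is reached through the public API; below is a hedged usage sketch, assuming a FreeBSD host with kvm_openfiles(), kvm_nlist2() and kvm_read2(), and using the kernel's hz variable purely as an example symbol.

#include <sys/param.h>
#include <fcntl.h>
#include <kvm.h>
#include <limits.h>
#include <stdio.h>

int
main(int argc, char **argv)
{
	char errbuf[_POSIX2_LINE_MAX];
	struct kvm_nlist nl[] = {
		{ .n_name = "_hz" },	/* example symbol; any kernel VA works */
		{ .n_name = NULL },
	};
	kvm_t *kd;
	int hz;

	if (argc < 3) {
		fprintf(stderr, "usage: %s <kernel> <crash-dump>\n", argv[0]);
		return (1);
	}
	kd = kvm_openfiles(argv[1], argv[2], NULL, O_RDONLY, errbuf);
	if (kd == NULL) {
		fprintf(stderr, "kvm_openfiles: %s\n", errbuf);
		return (1);
	}
	if (kvm_nlist2(kd, nl) != 0 || nl[0].n_value == 0) {
		fprintf(stderr, "kvm_nlist2: %s\n", kvm_geterr(kd));
	} else if (kvm_read2(kd, nl[0].n_value, &hz, sizeof(hz)) !=
	    (ssize_t)sizeof(hz)) {
		/* kvm_read2() goes through the MMU backend's kvatop path. */
		fprintf(stderr, "kvm_read2: %s\n", kvm_geterr(kd));
	} else {
		printf("hz = %d\n", hz);
	}
	kvm_close(kd);
	return (0);
}
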
531 slb_vsid_search(kvm_t *kd, uint64_t vsid) in slb_vsid_search() argument
537 data = PPC64_MMU_DATA(kd); in slb_vsid_search()
552 _kvm_err(kd, kd->program, in slb_vsid_search()
561 get_ea(kvm_t *kd, ppc64_pt_entry_t *pte, u_long ptex) in get_ea() argument
569 if ((slb = slb_vsid_search(kd, vsid)) == NULL) in get_ea()
581 if (kd->vmst->hdr.pmapsize / (8 * sizeof(ppc64_pt_entry_t)) < in get_ea()
584 * Add 0 to 5 EA bits, right after VSID. in get_ea()
585 * b == 12: 5 bits in get_ea()
590 (SLBE_ESID_SHIFT - 5 + shift); in get_ea()
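
The get_ea() matches run the translation in reverse: up to five EA bits that cannot be recovered from the hash are taken from the AVA page field and placed directly below the ESID, which is what the (SLBE_ESID_SHIFT - 5 + shift) expression does. The following is only a hypothetical sketch of that bit placement; the 28-bit ESID_SHIFT and the inputs are illustrative assumptions, not the libkvm code.

#include <stdint.h>
#include <stdio.h>

#define MIN(a, b)		((a) < (b) ? (a) : (b))
#define AVA_PAGE_SHIFT(b)	(5 - (MIN(54, 77-(b)) + 1 - 50))
#define ESID_SHIFT		28	/* stands in for SLBE_ESID_SHIFT */

/*
 * Hypothetical helper: place the (5 - shift) page bits recovered from the
 * AVA right below the ESID, mirroring the "(SLBE_ESID_SHIFT - 5 + shift)"
 * expression matched above.  `esid` and `ava_page` are example inputs.
 */
static uint64_t
ea_from_ava_bits(uint64_t esid, uint64_t ava_page, int b)
{
	int shift = AVA_PAGE_SHIFT(b);

	return ((esid << ESID_SHIFT) |
	    ((ava_page >> shift) << (ESID_SHIFT - 5 + shift)));
}

int
main(void)
{
	/* b == 12 (4 KiB pages): all 5 AVA page bits become EA bits 23..27. */
	printf("ea = 0x%jx\n", (uintmax_t)ea_from_ava_bits(0xc00000000ULL, 0x1f, 12));
	return (0);
}
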
602 ppc64mmu_hpt_walk_pages(kvm_t *kd, kvm_walk_pages_cb_t *cb, void *arg) in ppc64mmu_hpt_walk_pages() argument
612 vm = kd->vmst; in ppc64mmu_hpt_walk_pages()
617 ppc64_pt_entry_t pte = pte_get(kd, ptex); in ppc64mmu_hpt_walk_pages()
628 if ((va = get_ea(kd, &pte, ptex)) == ~0UL) in ppc64mmu_hpt_walk_pages()
643 if (!_kvm_visit_cb(kd, cb, arg, pa, va, dva, in ppc64mmu_hpt_walk_pages()
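
ppc64mmu_hpt_walk_pages() backs the public kvm_walk_pages() interface, which hands each page found in the dump to a caller-supplied callback (via _kvm_visit_cb above). A minimal consumer sketch is shown below, assuming a FreeBSD crash dump; it only counts the pages visited.

#include <sys/param.h>
#include <vm/vm.h>
#include <fcntl.h>
#include <kvm.h>
#include <limits.h>
#include <stdio.h>

/* Callback: kvm_walk_pages() invokes this once per page; non-zero keeps walking. */
static int
count_page(struct kvm_page *kp, void *arg)
{
	(void)kp;
	(*(unsigned long *)arg)++;
	return (1);
}

int
main(int argc, char **argv)
{
	char errbuf[_POSIX2_LINE_MAX];
	unsigned long npages = 0;
	kvm_t *kd;

	if (argc < 2) {
		fprintf(stderr, "usage: %s <crash-dump>\n", argv[0]);
		return (1);
	}
	kd = kvm_openfiles(NULL, argv[1], NULL, O_RDONLY, errbuf);
	if (kd == NULL) {
		fprintf(stderr, "kvm_openfiles: %s\n", errbuf);
		return (1);
	}
	/* Treat a zero return as "walk did not complete" (error or unsupported). */
	if (kvm_walk_pages(kd, count_page, &npages) == 0)
		fprintf(stderr, "kvm_walk_pages: %s\n", kvm_geterr(kd));
	else
		printf("%lu pages visited\n", npages);
	kvm_close(kd);
	return (0);
}
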