Lines matching refs:va: each match gives the source line number, the matching code, and the enclosing function.
250 uintptr_t va; in hat_alloc() local
318 for (va = rp->hkr_start_va; va != rp->hkr_end_va; in hat_alloc()
319 va += cnt * LEVEL_SIZE(rp->hkr_level)) { in hat_alloc()
324 ht = htable_create(hat, va, rp->hkr_level, in hat_alloc()
327 start = htable_va2entry(va, ht); in hat_alloc()
329 eva = va + in hat_alloc()
344 src = htable_lookup(kas.a_hat, va, rp->hkr_level); in hat_alloc()
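
The hat_alloc() lines above walk a kernel-reserve range one table's worth of address space at a time and derive the entry index for each step. Below is a minimal user-space sketch of that level arithmetic; the 4 KB base page, the 512-entry tables, and the macro definitions are assumptions chosen to mirror common x86-64 paging, not the kernel's own headers.

    /*
     * Illustrative model only: LEVEL_SIZE and the va-to-entry computation,
     * assuming 4 KB pages, 512 entries per table, and a 64-bit build.  The
     * names echo the macros in the listing; the definitions are this
     * example's, not the kernel source.
     */
    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PAGESHIFT       12                      /* assumed 4 KB base page */
    #define NPTES           512                     /* assumed entries per table */
    #define LEVEL_SHIFT(l)  (PAGESHIFT + 9 * (l))
    #define LEVEL_SIZE(l)   ((uintptr_t)1 << LEVEL_SHIFT(l))

    /* index of the entry mapping va within a level-l table (cf. htable_va2entry) */
    static unsigned
    va2entry(uintptr_t va, int level)
    {
            return ((unsigned)((va >> LEVEL_SHIFT(level)) & (NPTES - 1)));
    }

    int
    main(void)
    {
            int level = 1;                          /* 2 MB steps at level 1 */
            uintptr_t start = (uintptr_t)0xfffffe0000000000ULL;  /* made-up range */
            uintptr_t end = start + 8 * LEVEL_SIZE(level);

            /* same shape as the hat_alloc() loop: one table entry per step */
            for (uintptr_t va = start; va != end; va += LEVEL_SIZE(level))
                    printf("va %#" PRIxPTR " -> level-%d entry %u\n",
                        va, level, va2entry(va, level));
            return (0);
    }
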
476 hat_kernelbase(uintptr_t va) in hat_kernelbase() argument
479 va &= LEVEL_MASK(1); in hat_kernelbase()
481 if (IN_VA_HOLE(va)) in hat_kernelbase()
482 panic("_userlimit %p will fall in VA hole\n", (void *)va); in hat_kernelbase()
483 return (va); in hat_kernelbase()
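
hat_kernelbase() rounds the proposed user/kernel boundary down to a level-1 boundary and panics if the result lands in the VA hole. A sketch of that check follows; the 2 MB level-1 size and the x86-64 non-canonical hole bounds are assumptions for the example, not values taken from the kernel headers.

    /* Sketch of the hat_kernelbase() check; level size and hole bounds assumed. */
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define LEVEL1_SIZE  ((uintptr_t)2 << 20)               /* assumed 2 MB */
    #define LEVEL1_MASK  (~(LEVEL1_SIZE - 1))
    #define HOLE_START   ((uintptr_t)0x0000800000000000ULL) /* assumed x86-64 */
    #define HOLE_END     ((uintptr_t)0xffff800000000000ULL) /* non-canonical hole */

    static uintptr_t
    clamp_kernelbase(uintptr_t va)
    {
            va &= LEVEL1_MASK;                      /* cf. va &= LEVEL_MASK(1) */
            if (va >= HOLE_START && va < HOLE_END) {
                    fprintf(stderr, "%p falls in the VA hole\n", (void *)va);
                    abort();                        /* stand-in for panic() */
            }
            return (va);
    }

    int
    main(void)
    {
            uintptr_t limit = (uintptr_t)0xfffffd8000000000ULL + 12345;

            printf("clamped to %p\n", (void *)clamp_kernelbase(limit));
            return (0);
    }
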
845 uintptr_t va; in hat_init_finish() local
873 va = kernelbase; in hat_init_finish()
874 if ((va & LEVEL_MASK(2)) != va) { in hat_init_finish()
875 va = P2ROUNDUP(va, LEVEL_SIZE(2)); in hat_init_finish()
876 NEXT_HKR(r, 1, kernelbase, va); in hat_init_finish()
878 if (va != 0) in hat_init_finish()
879 NEXT_HKR(r, 2, va, 0); in hat_init_finish()
894 for (va = rp->hkr_start_va; va != rp->hkr_end_va; in hat_init_finish()
895 va += LEVEL_SIZE(rp->hkr_level)) { in hat_init_finish()
898 if (IN_HYPERVISOR_VA(va)) in hat_init_finish()
903 (ht = htable_getpage(kas.a_hat, va, NULL)) != in hat_init_finish()
909 (void) htable_create(kas.a_hat, va, rp->hkr_level - 1, in hat_init_finish()
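
The hat_init_finish() lines above split the kernel's share of the address space into reserve regions: when kernelbase is not aligned to a level-2 boundary, the unaligned head is described at level 1 and the aligned remainder at level 2, with an end address of 0 standing for the top of the address space. A sketch of that split is below, using an assumed 1 GB level-2 size and a common power-of-two formulation of P2ROUNDUP.

    /* Sketch of the kernel-reserve split; level-2 size and addresses are made up. */
    #include <stdint.h>
    #include <stdio.h>

    #define LEVEL2_SIZE      ((uintptr_t)1 << 30)   /* assumed 1 GB at level 2 */
    #define P2ROUNDUP(x, a)  ((((x) + (a) - 1) & ~((a) - 1)))

    int
    main(void)
    {
            /* made-up 64-bit kernelbase that is not level-2 aligned */
            uintptr_t kernelbase = (uintptr_t)0xfffffd8004000000ULL;
            uintptr_t va = kernelbase;

            if ((va & ~(LEVEL2_SIZE - 1)) != va) {  /* cf. (va & LEVEL_MASK(2)) != va */
                    va = P2ROUNDUP(va, LEVEL2_SIZE);
                    printf("level-1 region: [%p, %p)\n",
                        (void *)kernelbase, (void *)va);
            }
            if (va != 0)                            /* 0 would mean wrapped to the top */
                    printf("level-2 region: [%p, top of address space)\n", (void *)va);
            return (0);
    }
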
1422 uintptr_t va, in hati_load_common() argument
1449 ht = htable_lookup(hat, va, level); in hati_load_common()
1458 ht = htable_create(hat, va, level, NULL); in hati_load_common()
1461 entry = htable_va2entry(va, ht); in hati_load_common()
1467 if (ht->ht_vaddr > va || va > HTABLE_LAST_PAGE(ht)) in hati_load_common()
1469 (void *)ht, (void *)va); in hati_load_common()
1478 if (hat == kas.a_hat && va >= kernelbase) in hati_load_common()
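
hati_load_common() looks the page table up first, creates it only on a miss, and then checks that va really falls inside the window that table covers before the entry index is used. The sketch below models that look-up-or-create-then-check flow with a toy table type; htable_t, its cache, and every helper here are stand-ins, not the real interfaces.

    /* Toy look-up-or-create flow; the table type and helpers are hypothetical. */
    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define NPTES        512
    #define LEVEL0_SIZE  ((uintptr_t)4096)          /* assumed 4 KB pages */
    #define TABLE_SPAN   (NPTES * LEVEL0_SIZE)      /* VA covered by one table */
    #define NCACHE       8

    typedef struct {
            uintptr_t ht_vaddr;                     /* base VA the table maps */
            int       ht_valid;
    } toy_htable_t;

    static toy_htable_t cache[NCACHE];              /* stand-in for the htable cache */

    static toy_htable_t *
    toy_lookup(uintptr_t va)
    {
            uintptr_t base = va & ~(TABLE_SPAN - 1);

            for (int i = 0; i < NCACHE; i++)
                    if (cache[i].ht_valid && cache[i].ht_vaddr == base)
                            return (&cache[i]);
            return (NULL);
    }

    static toy_htable_t *
    toy_create(uintptr_t va)
    {
            for (int i = 0; i < NCACHE; i++) {
                    if (!cache[i].ht_valid) {
                            cache[i].ht_vaddr = va & ~(TABLE_SPAN - 1);
                            cache[i].ht_valid = 1;
                            return (&cache[i]);
                    }
            }
            return (NULL);                          /* cache full */
    }

    int
    main(void)
    {
            uintptr_t va = (uintptr_t)0x40007000;
            toy_htable_t *ht = toy_lookup(va);      /* cf. htable_lookup() */

            if (ht == NULL)
                    ht = toy_create(va);            /* cf. htable_create() on a miss */
            assert(ht != NULL);
            /* the containment check hati_load_common() panics on */
            assert(ht->ht_vaddr <= va && va < ht->ht_vaddr + TABLE_SPAN);
            printf("entry %lu\n",
                (unsigned long)((va - ht->ht_vaddr) / LEVEL0_SIZE));
            return (0);
    }
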
1504 uintptr_t va = (uintptr_t)addr; in hat_kmap_load() local
1507 pgcnt_t pg_off = mmu_btop(va - mmu.kmap_addr); in hat_kmap_load()
1527 ht = mmu.kmap_htables[(va - mmu.kmap_htables[0]->ht_vaddr) >> in hat_kmap_load()
1529 entry = htable_va2entry(va, ht); in hat_kmap_load()
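
hat_kmap_load() never walks the table tree: the kmap window is one contiguous VA range with a preallocated array of page tables, so the table index, the page index, and the entry all fall straight out of (va - window base). The sketch below shows that arithmetic under assumed window geometry (it takes the first table's base to be the window base); the same indexing appears again in hat_kmap_unload() further down.

    /* Sketch of kmap window indexing; window base and page geometry are assumed. */
    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PAGESHIFT    12                         /* assumed 4 KB pages */
    #define NPTES        512                        /* assumed entries per table */
    #define TABLE_SHIFT  (PAGESHIFT + 9)            /* VA bits covered per table */

    int
    main(void)
    {
            /* made-up window: 64 MB of kmap space at an arbitrary aligned base */
            uintptr_t kmap_addr = (uintptr_t)0xfffffe8000000000ULL;
            uintptr_t va = kmap_addr + (12345UL << PAGESHIFT);

            size_t pg_off = (va - kmap_addr) >> PAGESHIFT;     /* cf. mmu_btop() */
            size_t table = (va - kmap_addr) >> TABLE_SHIFT;    /* kmap_htables[] index */
            unsigned entry = (unsigned)((va >> PAGESHIFT) & (NPTES - 1));

            printf("pg_off %zu, table %zu, entry %u\n", pg_off, table, entry);
            return (0);
    }
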
1582 uintptr_t va = (uintptr_t)addr; in hat_memload() local
1587 ASSERT(IS_PAGEALIGNED(va)); in hat_memload()
1588 ASSERT(hat == kas.a_hat || va < _userlimit); in hat_memload()
1592 ASSERT(!IN_VA_HOLE(va)); in hat_memload()
1598 if (mmu.kmap_addr <= va && va < mmu.kmap_eaddr) { in hat_memload()
1610 if (hati_load_common(hat, va, pp, attr, flags, level, pfn) != 0) in hat_memload()
1635 uintptr_t va = (uintptr_t)addr; in hat_memload_array() local
1636 uintptr_t eaddr = va + len; in hat_memload_array()
1644 ASSERT(IS_PAGEALIGNED(va)); in hat_memload_array()
1645 ASSERT(hat == kas.a_hat || va + len <= _userlimit); in hat_memload_array()
1658 while (va < eaddr) { in hat_memload_array()
1668 if (!IS_P2ALIGNED(va, pgsize) || in hat_memload_array()
1669 (eaddr - va) < pgsize || in hat_memload_array()
1704 ASSERT(!IN_VA_HOLE(va)); in hat_memload_array()
1705 while (hati_load_common(hat, va, pages[pgindx], attr, in hat_memload_array()
1716 va += pgsize; in hat_memload_array()
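
hat_memload_array() picks the mapping size chunk by chunk: a large page is used only when the current va is aligned to that page size and at least one full page of that size remains before eaddr; hat_devload() below repeats the same test. A sketch of that selection loop, with an assumed set of x86-style page sizes:

    /* Sketch of choosing the largest page size that fits; sizes are assumed. */
    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define IS_P2ALIGNED(v, a)  (((v) & ((a) - 1)) == 0)

    static const uintptr_t pagesizes[] = {          /* assumed: 4 KB, 2 MB, 1 GB */
            (uintptr_t)1 << 12, (uintptr_t)1 << 21, (uintptr_t)1 << 30
    };

    int
    main(void)
    {
            uintptr_t va = (uintptr_t)0x7f0000200000ULL;    /* made-up range */
            uintptr_t eaddr = va + ((uintptr_t)5 << 21) + ((uintptr_t)3 << 12);

            while (va < eaddr) {
                    int level = 2;

                    /* drop to a smaller page until both tests pass (cf. the listing) */
                    while (level > 0 &&
                        (!IS_P2ALIGNED(va, pagesizes[level]) ||
                        eaddr - va < pagesizes[level]))
                            level--;

                    printf("map %#" PRIxPTR " at level %d (%#" PRIxPTR " bytes)\n",
                        va, level, pagesizes[level]);
                    va += pagesizes[level];
            }
            return (0);
    }
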
1770 uintptr_t va = ALIGN2PAGE(addr); in hat_devload() local
1771 uintptr_t eva = va + len; in hat_devload()
1779 ASSERT(IS_PAGEALIGNED(va)); in hat_devload()
1787 while (va < eva) { in hat_devload()
1796 if (IS_P2ALIGNED(va, pgsize) && in hat_devload()
1797 (eva - va) >= pgsize && in hat_devload()
1847 ASSERT(!IN_VA_HOLE(va)); in hat_devload()
1848 while (hati_load_common(hat, va, pp, a, f, level, pfn) != 0) { in hat_devload()
1858 va += pgsize; in hat_devload()
2037 hat_tlb_inval_range(hat_t *hat, uintptr_t va, size_t len) in hat_tlb_inval_range() argument
2062 va = DEMAP_ALL_ADDR; in hat_tlb_inval_range()
2070 if (va == DEMAP_ALL_ADDR) { in hat_tlb_inval_range()
2074 xen_flush_va((caddr_t)(va + i)); in hat_tlb_inval_range()
2078 (xc_arg_t)va, (xc_arg_t)len); in hat_tlb_inval_range()
2129 if (va == DEMAP_ALL_ADDR) { in hat_tlb_inval_range()
2133 xen_flush_va((caddr_t)(va + i)); in hat_tlb_inval_range()
2137 (xc_arg_t)va, (xc_arg_t)len); in hat_tlb_inval_range()
2144 if (va == DEMAP_ALL_ADDR) { in hat_tlb_inval_range()
2148 xen_gflush_va((caddr_t)(va + i), in hat_tlb_inval_range()
2153 xc_call((xc_arg_t)hat, (xc_arg_t)va, (xc_arg_t)len, in hat_tlb_inval_range()
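
hat_tlb_inval_range() leans on a sentinel: when the whole context has to be invalidated, va is set to DEMAP_ALL_ADDR and the flush path drops everything instead of walking the range page by page the way the xen_flush_va() loops above do. The sketch below shows only that sentinel-plus-loop shape; the sentinel value and both flush routines are stubs, not the kernel's cross-call or hypervisor paths.

    /* Sketch of the DEMAP_ALL_ADDR sentinel pattern; flush routines are stubs. */
    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define MMU_PAGESIZE    4096                    /* assumed 4 KB pages */
    #define DEMAP_ALL_ADDR  (~(uintptr_t)0)         /* assumed sentinel value */

    static void
    flush_all(void)
    {
            printf("flush the entire TLB\n");       /* stand-in for a global flush */
    }

    static void
    flush_page(uintptr_t va)
    {
            printf("flush page %#" PRIxPTR "\n", va);  /* cf. per-page xen_flush_va() */
    }

    static void
    tlb_inval_range(uintptr_t va, size_t len)
    {
            if (va == DEMAP_ALL_ADDR) {
                    flush_all();
                    return;
            }
            for (size_t i = 0; i < len; i += MMU_PAGESIZE)
                    flush_page(va + i);
    }

    int
    main(void)
    {
            tlb_inval_range((uintptr_t)0xfec00000, 3 * MMU_PAGESIZE);
            tlb_inval_range(DEMAP_ALL_ADDR, MMU_PAGESIZE);  /* cf. a full demap */
            return (0);
    }
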
2162 hat_tlb_inval(hat_t *hat, uintptr_t va) in hat_tlb_inval() argument
2164 hat_tlb_inval_range(hat, va, MMU_PAGESIZE); in hat_tlb_inval()
2276 uintptr_t va = (uintptr_t)addr; in hat_kmap_unload() local
2277 uintptr_t eva = va + len; in hat_kmap_unload()
2284 for (; va < eva; va += MMU_PAGESIZE) { in hat_kmap_unload()
2288 pg_index = mmu_btop(va - mmu.kmap_addr); in hat_kmap_unload()
2295 ht = mmu.kmap_htables[(va - mmu.kmap_htables[0]->ht_vaddr) in hat_kmap_unload()
2297 entry = htable_va2entry(va, ht); in hat_kmap_unload()
2313 uintptr_t va = (uintptr_t)addr; in hat_unload() local
2316 ASSERT(hat == kas.a_hat || va + len <= _userlimit); in hat_unload()
2321 if (mmu.kmap_addr <= va && va < mmu.kmap_eaddr) { in hat_unload()
2469 hat_flush_range(hat_t *hat, caddr_t va, size_t size) in hat_flush_range() argument
2472 caddr_t endva = va + size; in hat_flush_range()
2474 while (va < endva) { in hat_flush_range()
2475 sz = hat_getpagesize(hat, va); in hat_flush_range()
2485 xen_flush_va(va); in hat_flush_range()
2487 mmu_tlbflush_entry(va); in hat_flush_range()
2489 va += sz; in hat_flush_range()
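
hat_flush_range() cannot assume a uniform stride: each mapping in the range may use a different page size, so it asks for the size at the current va and advances by whatever comes back. A sketch of that variable-stride walk is below; the toy lookup simply pretends 2 MB-aligned addresses carry large pages and otherwise returns 4 KB, and it is not hat_getpagesize().

    /* Sketch of walking a range by per-mapping page size; the lookup is a toy. */
    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define SMALL_PAGE  ((uintptr_t)1 << 12)        /* assumed 4 KB */
    #define LARGE_PAGE  ((uintptr_t)1 << 21)        /* assumed 2 MB */

    /* toy stand-in for hat_getpagesize() */
    static uintptr_t
    toy_getpagesize(uintptr_t va)
    {
            return ((va & (LARGE_PAGE - 1)) == 0 ? LARGE_PAGE : SMALL_PAGE);
    }

    int
    main(void)
    {
            uintptr_t va = (uintptr_t)0x7f00001ff000ULL;    /* made-up range */
            uintptr_t endva = va + SMALL_PAGE + LARGE_PAGE;

            while (va < endva) {
                    uintptr_t sz = toy_getpagesize(va);

                    printf("flush %#" PRIxPTR ", %#" PRIxPTR " bytes\n", va, sz);
                    va += sz;                       /* stride follows the mapping size */
            }
            return (0);
    }
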
2880 is_it_dism(hat_t *hat, caddr_t va) in is_it_dism() argument
2886 seg = as_findseg(hat->hat_as, va, 0); in is_it_dism()
2888 ASSERT(seg->s_base <= va); in is_it_dism()
3872 uintptr_t va = (uintptr_t)addr; in hat_mempte_setup() local
3878 ASSERT(IS_PAGEALIGNED(va)); in hat_mempte_setup()
3879 ASSERT(!IN_VA_HOLE(va)); in hat_mempte_setup()
3882 ht = htable_getpte(kas.a_hat, va, &entry, &oldpte, 0); in hat_mempte_setup()
3884 ht = htable_create(kas.a_hat, va, 0, NULL); in hat_mempte_setup()
3885 entry = htable_va2entry(va, ht); in hat_mempte_setup()
3962 uintptr_t va = (uintptr_t)addr; in hat_mempte_remap() local
3973 ASSERT(IS_PAGEALIGNED(va)); in hat_mempte_remap()
3974 ASSERT(!IN_VA_HOLE(va)); in hat_mempte_remap()
3975 ht = htable_getpte(kas.a_hat, va, &entry, NULL, 0); in hat_mempte_remap()
3985 if (HYPERVISOR_update_va_mapping(va, pte, UVMF_INVLPG | UVMF_LOCAL)) in hat_mempte_remap()