
Searched refs:pfn_to_pa (Results 1 – 21 of 21) sorted by relevance

/titanic_41/usr/src/uts/i86pc/vm/
hat_pte.h
103 (pa_to_ma(pfn_to_pa(pfn)) | mmu.ptp_bits[(l) + 1])
106 ((pfn_to_pa(pfn & ~PFN_IS_FOREIGN_MFN) | mmu.pte_bits[l]) | \
108 (pa_to_ma(pfn_to_pa(pfn)) | mmu.pte_bits[l]))
111 (pfn_to_pa(pfn) | mmu.ptp_bits[(l) + 1])
113 (pfn_to_pa(pfn) | mmu.pte_bits[l])
286 #define pfn_to_pa(pfn) (mmu_ptob((paddr_t)(pfn))) macro
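The hat_pte.h hits above end with the macro definition itself: pfn_to_pa() is just mmu_ptob() applied to the pfn, i.e. a pages-to-bytes conversion. Below is a minimal, self-contained sketch of that behavior; the 4 KB page size (MMU_PAGESHIFT of 12), the shift-based mmu_ptob(), and the test values in main() are assumptions for illustration, not the kernel's actual definitions.

#include <stdint.h>

typedef uint64_t paddr_t;
typedef uint64_t pfn_t;

#define MMU_PAGESHIFT   12                      /* assumed: 4 KB base pages */
#define MMU_PAGEOFFSET  ((1ULL << MMU_PAGESHIFT) - 1)
#define mmu_ptob(x)     ((x) << MMU_PAGESHIFT)  /* pages to bytes */
#define pfn_to_pa(pfn)  (mmu_ptob((paddr_t)(pfn)))

int
main(void)
{
        pfn_t pfn = 0x1234;
        /*
         * Rebuild a full physical address from a page frame number plus
         * a page offset, mirroring uses such as kdi_vtop() below.
         */
        paddr_t pa = pfn_to_pa(pfn) + (0x1234056ULL & MMU_PAGEOFFSET);
        return (pa == 0x1234056ULL ? 0 : 1);
}
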
hat_kdi.c
121 return (pfn_to_pa(mfn) | (pa & MMU_PAGEOFFSET)); in kdi_ptom()
140 return (pfn_to_pa(pfn) | (ma & MMU_PAGEOFFSET)); in kdi_mtop()
171 *pap = pfn_to_pa(pfn) + (vaddr & MMU_PAGEOFFSET); in kdi_vtop()
180 *pap = pfn_to_pa(CPU->cpu_current_hat->hat_htable->ht_pfn); in kdi_vtop()
htable.c
893 block_zero_no_xmm(kpm_vbase + pfn_to_pa(hat->hat_user_ptable), in htable_alloc()
1549 ptep = kbm_remap_window(pfn_to_pa(pfn), 0); in htable_attach()
1560 ptep = kbm_remap_window(pfn_to_pa(pfn), 0); in htable_attach()
1953 caddr_t va = kbm_remap_window(pfn_to_pa(pfn), 1); in x86pte_mapin()
2178 ma = pa_to_ma(PT_INDEX_PHYSADDR(pfn_to_pa(ht->ht_pfn), entry)); in x86pte_cas()
2188 ma = pa_to_ma(PT_INDEX_PHYSADDR(pfn_to_pa( in x86pte_cas()
2251 ma = pa_to_ma(PT_INDEX_PHYSADDR(pfn_to_pa(ht->ht_pfn), entry)); in x86pte_inval()
2403 set_pteval(pfn_to_pa(dest->ht_pfn), entry, in x86pte_copy()
2409 pfn_to_pa(dest->ht_hat->hat_user_ptable), in x86pte_copy()
kboot_mmu.c
368 x86pte_t pte_val = pa_to_ma(pfn_to_pa(pfn)) | PT_WRITABLE | in kbm_remap()
vm_machdep.c
938 return (address_in_memlist(phys_install, pfn_to_pa(pf), 1)); in pf_is_memory()
1090 if (pa_to_ma(pfn_to_pa(pp->p_pagenum)) < in check_dma()
1093 if (pa_to_ma(pfn_to_pa(pp->p_pagenum)) >= in check_dma()
3133 pgaddr = pa_to_ma(pfn_to_pa(pp->p_pagenum)); in page_get_mnode_anylist()
3225 pgaddr = pa_to_ma(pfn_to_pa(pp->p_pagenum)); in page_get_mnode_anylist()
hat_i86.c
1670 !IS_P2ALIGNED(pfn_to_pa(pfn), pgsize)) in hat_memload_array()
3903 p = PT_INDEX_PHYSADDR(pfn_to_pa(ht->ht_pfn), entry); in hat_mempte_setup()
/titanic_41/usr/src/uts/i86pc/io/gfx_private/
gfxp_vm.c
163 *pa = pa_to_ma(pfn_to_pa(hat_getpfnum(as->a_hat, addr))); in gfxp_va2pa()
165 *pa = pfn_to_pa(hat_getpfnum(as->a_hat, addr)); in gfxp_va2pa()
254 return (pfn_to_pa(xen_assign_pfn(btop(paddr)))); in gfxp_convert_addr()
/titanic_41/usr/src/uts/i86xpv/os/
xen_mmu.c
160 mach_addr = pa_to_ma(pfn_to_pa(va_to_pfn( in xen_relocate_start_info()
178 pa_to_ma(pfn_to_pa(va_to_pfn((caddr_t)mfn_list + off))); in xen_relocate_start_info()
228 kbm_map_ma(pfn_to_pa(xen_info->console.domU.mfn), addr, 0); in xen_relocate_start_info()
394 return (pfn_to_pa(pfn) + (ma & MMU_PAGEOFFSET)); in ma_to_pa()
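The xen_mmu.c ma_to_pa() hit above shows the common shape of these conversions on i86xpv: only the frame number is remapped, while the byte offset within the page is carried over. A rough sketch of that shape follows; mfn_to_pfn_sketch(), the m2p array, and the values in main() are stand-ins invented for illustration, not the real machine-to-pseudophysical map.

#include <stddef.h>
#include <stdint.h>

typedef uint64_t maddr_t;
typedef uint64_t paddr_t;
typedef uint64_t pfn_t;
typedef uint64_t mfn_t;

#define MMU_PAGESHIFT   12              /* assumed: 4 KB base pages */
#define MMU_PAGEOFFSET  ((1ULL << MMU_PAGESHIFT) - 1)
#define pfn_to_pa(pfn)  ((paddr_t)(pfn) << MMU_PAGESHIFT)

/* Stand-in for the hypervisor's machine-to-pseudophysical frame map. */
static pfn_t
mfn_to_pfn_sketch(mfn_t mfn, const pfn_t *m2p, size_t nframes)
{
        return (mfn < nframes ? m2p[mfn] : (pfn_t)-1);
}

/* Same shape as the ma_to_pa() hit above: remap the frame, keep the offset. */
static paddr_t
ma_to_pa_sketch(maddr_t ma, const pfn_t *m2p, size_t nframes)
{
        pfn_t pfn = mfn_to_pfn_sketch(ma >> MMU_PAGESHIFT, m2p, nframes);
        return (pfn_to_pa(pfn) + (ma & MMU_PAGEOFFSET));
}

int
main(void)
{
        pfn_t m2p[4] = { 3, 2, 1, 0 };  /* toy map: mfn i -> pfn 3 - i */
        /* Machine frame 1, offset 0x80 -> pseudo-physical frame 2, offset 0x80. */
        paddr_t pa = ma_to_pa_sketch((1ULL << MMU_PAGESHIFT) | 0x80, m2p, 4);
        return (pa == ((2ULL << MMU_PAGESHIFT) | 0x80) ? 0 : 1);
}
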
mach_kdi.c
184 gdtpa = pfn_to_pa(va_to_pfn(bgdt)); in boot_kdi_tmpinit()
237 gdtpa = pfn_to_pa(va_to_pfn(bgdt)); in boot_kdi_tmpinit()
balloon.c
248 metasz = pfn_to_pa(metapgs); in balloon_init_new_pages()
352 memlist_add(pfn_to_pa(meta_start), num_pages, &mem->memlist, in balloon_init_new_pages()
xpv_panic.c
167 pte = pfn_to_pa(pfn) | PT_VALID; in xpv_panic_map()
/titanic_41/usr/src/uts/i86pc/io/
rootnex.c
1160 pbase = pfn_to_pa(xen_assign_pfn(mmu_btop(rbase))); in rootnex_map_regspec()
1349 pbase = pfn_to_pa(xen_assign_pfn(mmu_btop(rbase))) | in rootnex_map_handle()
2775 paddr = pfn_to_pa(pp->p_pagenum) + offset; in rootnex_need_bounce_seg()
2785 paddr = pfn_to_pa(pplist[pcnt]->p_pagenum); in rootnex_need_bounce_seg()
2795 paddr = pfn_to_pa(hat_getpfnum(sglinfo->si_asp->a_hat, vaddr)); in rootnex_need_bounce_seg()
2821 paddr = pfn_to_pa(pp->p_pagenum); in rootnex_need_bounce_seg()
2826 paddr = pfn_to_pa(pplist[pcnt]->p_pagenum); in rootnex_need_bounce_seg()
2830 paddr = pfn_to_pa(hat_getpfnum(sglinfo->si_asp->a_hat, in rootnex_need_bounce_seg()
2917 paddr = pfn_to_pa(pp->p_pagenum) + offset; in rootnex_get_sgl()
2938 paddr = pfn_to_pa(pplist[pcnt]->p_pagenum); in rootnex_get_sgl()
[all …]
immu_qinv.c
456 qinv->qinv_table.qinv_mem_paddr = pfn_to_pa( in qinv_setup()
481 qinv->qinv_sync.qinv_mem_paddr = pfn_to_pa( in qinv_setup()
immu_dvma.c
467 pgtable->hwpg_paddr = pfn_to_pa(hat_getpfnum(kas.a_hat, vaddr)); in pgtable_ctor()
2594 paddr = pfn_to_pa(page->p_pagenum) + offset; in immu_map_dvmaseg()
2604 paddr = pfn_to_pa(pparray[pcnt]->p_pagenum) + offset; in immu_map_dvmaseg()
2608 paddr = pfn_to_pa(hat_getpfnum(vas->a_hat, in immu_map_dvmaseg()
2657 paddr = pfn_to_pa(page->p_pagenum); in immu_map_dvmaseg()
2661 paddr = pfn_to_pa(pparray[pcnt]->p_pagenum); in immu_map_dvmaseg()
2665 paddr = pfn_to_pa(hat_getpfnum(vas->a_hat, vaddr)); in immu_map_dvmaseg()
immu.c
1338 paddr = pfn_to_pa(hat_getpfnum(kas.a_hat, vaddr)); in immu_init_inv_wait()
immu_intrmap.c
375 intrmap->intrmap_paddr = pfn_to_pa( in init_unit()
/titanic_41/usr/src/uts/intel/ia32/os/
desctbls.c
610 gdtpa = pfn_to_pa(va_to_pfn(gdt0)); in init_gdt()
817 gdtpa = pfn_to_pa(va_to_pfn(gdt0)); in init_gdt()
1190 CPU->cpu_m.mcpu_gdtpa = pfn_to_pa(va_to_pfn(gdt)); in init_desctbls()
/titanic_41/usr/src/uts/i86pc/os/
pci_cfgacc_x86.c
64 phys_addr = pfn_to_pa(xen_assign_pfn(mmu_btop(phys_addr))) | in pci_cfgacc_map()
mp_implfuncs.c
222 base = pfn_to_pa(xen_assign_pfn(mmu_btop(addr))) | in psm_map_phys_new()
fakebop.c
204 if (physmem != 0 && high_phys > pfn_to_pa(physmem)) in do_bop_phys_alloc()
205 high_phys = pfn_to_pa(physmem); in do_bop_phys_alloc()
1951 pa = pfn_to_pa(xen_assign_pfn(mmu_btop(pa))) | (pa & MMU_PAGEOFFSET); in vmap_phys()
startup.c
819 pfn_addr = pfn_to_pa(pfn); in avail_filter()
844 pfn_addr = pfn_to_pa(pfn); in avail_filter()