Searched refs: MMU_PAGE_SIZE (Results 1 – 8 of 8), sorted by relevance
/illumos-gate/usr/src/uts/sun4/io/px/
px_mmu.c
    110  MMU_PTOB(tsb_entries) - cache_size, MMU_PAGE_SIZE,  in px_mmu_attach()
    111  NULL, NULL, NULL, MMU_PAGE_SIZE, VM_SLEEP);  in px_mmu_attach()
    140  (void) vmem_xalloc(mmu_p->mmu_dvma_map, MMU_PAGE_SIZE,  in px_mmu_attach()
    141  MMU_PAGE_SIZE, 0, 0, va, va + MMU_PAGE_SIZE,  in px_mmu_attach()
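The px_mmu_attach() hits show the usual two-step DVMA setup: create a vmem arena whose quantum is one IOMMU page, then reserve a specific page out of it with vmem_xalloc(). A kernel-context sketch of that pattern follows (not a standalone program; the arena name and the dvma_base/dvma_len/va parameters are illustrative, not the driver's own):

    #include <sys/types.h>
    #include <sys/vmem.h>

    /* MMU_PAGE_SIZE as defined in the px headers shown below. */

    static vmem_t *
    dvma_arena_create(void *dvma_base, size_t dvma_len)
    {
            /* Page-sized quantum and quantum-cache limit, no backing source. */
            return (vmem_create("px_dvma_sketch", dvma_base, dvma_len,
                MMU_PAGE_SIZE, NULL, NULL, NULL, MMU_PAGE_SIZE, VM_SLEEP));
    }

    static void
    dvma_reserve_page(vmem_t *arena, uintptr_t va)
    {
            /*
             * Claim exactly [va, va + MMU_PAGE_SIZE): one page-sized,
             * page-aligned allocation constrained to that address range.
             */
            (void) vmem_xalloc(arena, MMU_PAGE_SIZE, MMU_PAGE_SIZE, 0, 0,
                (void *)va, (void *)(va + MMU_PAGE_SIZE), VM_SLEEP);
    }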
px_dma.c
    302  if (align && (align > MMU_PAGE_SIZE))  in px_dma_attr2hdl()
    312  align = MAX(align, MMU_PAGE_SIZE) - 1;  in px_dma_attr2hdl()
    526  vaddr = mp->dmai_object.dmao_obj.virt_obj.v_addr + MMU_PAGE_SIZE;  in px_dma_vapfn()
    527  for (i = 1; i < npages; i++, vaddr += MMU_PAGE_SIZE) {  in px_dma_vapfn()
    634  uint32_t redzone_sz = PX_HAS_REDZONE(mp) ? MMU_PAGE_SIZE : 0;  in px_dvma_win()
    642  mp->dmai_winsize = MMU_PAGE_SIZE;  in px_dvma_win()
    671  mp->dmai_winsize = P2ROUNDUP(xfer_sz + pg_off, MMU_PAGE_SIZE);  in px_dvma_win()
    800  MMU_PAGE_SIZE, sleep);  in px_dvma_map()
    808  MAX(mp->dmai_attr.dma_attr_align, MMU_PAGE_SIZE),  in px_dvma_map()
    833  MMU_PAGE_SIZE);  in px_dvma_map()
    [all …]
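The px_dvma_win() hits size a DVMA window by rounding the transfer length plus its starting page offset up to whole IOMMU pages, apparently adding one guard page when a red zone is in use (line 634). A stand-alone sketch of that rounding, with P2ROUNDUP written out as in sys/sysmacros.h (the page shift and the sizes are made up for illustration):

    #include <stdio.h>
    #include <stddef.h>

    #define MMU_PAGE_SIZE   (1 << 13)               /* assumed 8K page, for illustration */
    #define P2ROUNDUP(x, a) (-(-(x) & -(a)))        /* round x up to a multiple of a */

    int
    main(void)
    {
            size_t xfer_sz = 20000;         /* made-up transfer length in bytes */
            size_t pg_off = 0x321;          /* buffer's offset within its first page */
            size_t redzone = MMU_PAGE_SIZE; /* one guard page when a red zone is wanted */

            /* Cover pg_off + xfer_sz with whole pages, as the line 671 hit does. */
            size_t winsize = P2ROUNDUP(xfer_sz + pg_off, MMU_PAGE_SIZE);

            printf("window: %zu bytes (%zu pages), plus %zu-byte red zone\n",
                winsize, winsize / MMU_PAGE_SIZE, redzone);
            return (0);
    }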
px_lib.h
    43  #define MMU_PAGE_SIZE (1 << MMU_PAGE_SHIFT)
    44  #define MMU_PAGE_MASK ~(MMU_PAGE_SIZE - 1)
    45  #define MMU_PAGE_OFFSET (MMU_PAGE_SIZE - 1)
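The px_lib.h hits define the usual page-arithmetic trio: a size, a mask that clears the in-page bits, and a mask that keeps them. A minimal user-space sketch of how such macros are typically used; the MMU_PAGE_SHIFT value below is assumed only for illustration, the real one comes from the px headers:

    #include <stdio.h>
    #include <stdint.h>

    /* Assumed shift for illustration only; px_lib.h supplies the real value. */
    #define MMU_PAGE_SHIFT   13
    #define MMU_PAGE_SIZE    (1 << MMU_PAGE_SHIFT)
    #define MMU_PAGE_MASK    ~(MMU_PAGE_SIZE - 1)
    #define MMU_PAGE_OFFSET  (MMU_PAGE_SIZE - 1)

    int
    main(void)
    {
            uintptr_t addr = 0x12345;

            /* Page base: clear the low MMU_PAGE_SHIFT bits. */
            uintptr_t base = addr & MMU_PAGE_MASK;
            /* Offset within the page: keep only the low bits. */
            uintptr_t off = addr & MMU_PAGE_OFFSET;

            printf("addr 0x%lx -> page base 0x%lx, offset 0x%lx\n",
                (unsigned long)addr, (unsigned long)base, (unsigned long)off);
            return (0);
    }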
px_fdvma.c
    177  MMU_PTOB(npages), MMU_PAGE_SIZE, 0,  in px_fdvma_reserve()
    203  mp->dmai_size = npages * MMU_PAGE_SIZE;  in px_fdvma_reserve()
px.c
    1369  *((ulong_t *)result) = MMU_PAGE_SIZE;  in px_ctlops()
/illumos-gate/usr/src/uts/sun4v/io/px/
px_lib4v.h
    94  hat_getpfnum(kas.a_hat, ((caddr_t)addr + (MMU_PAGE_SIZE * i))))
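The px_lib4v.h macro walks a kernel-virtual buffer one IOMMU page at a time and asks the kernel HAT for each page's frame number. A kernel-context sketch of that loop (not a standalone program; the function name and the pfn_list/npages parameters are made up for illustration):

    #include <sys/types.h>
    #include <vm/hat.h>
    #include <vm/as.h>

    /* MMU_PAGE_SIZE as defined in the px headers shown above. */

    static void
    va_to_pfns(caddr_t addr, size_t npages, pfn_t *pfn_list)
    {
            size_t i;

            for (i = 0; i < npages; i++) {
                    /* kas.a_hat: the kernel address space's HAT. */
                    pfn_list[i] = hat_getpfnum(kas.a_hat,
                        addr + (MMU_PAGE_SIZE * i));
            }
    }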
px_lib4v.c
    469  ttes2map = (MMU_PAGE_SIZE - P2PHASE(ra, MMU_PAGE_SIZE)) >> 3;  in px_lib_iommu_map()
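In the px_lib_iommu_map() hit, the shift by 3 presumably divides the bytes remaining in ra's page by 8, the size of one 64-bit entry, so ttes2map is how many entries can be handed over before the list crosses a page boundary. A small stand-alone sketch of that arithmetic, with P2PHASE written out as in sys/sysmacros.h (the page shift and the ra value are made up):

    #include <stdio.h>
    #include <stdint.h>

    #define MMU_PAGE_SIZE   (1 << 13)               /* assumed 8K page, for illustration */
    #define P2PHASE(x, a)   ((x) & ((a) - 1))       /* offset of x within an a-sized chunk */

    int
    main(void)
    {
            uint64_t ra = 0x10001f40ULL;    /* made-up real address of the entry list */

            /*
             * Bytes remaining in ra's page, divided by 8 (one 64-bit entry):
             * the number of entries that fit up to the page boundary.
             */
            uint64_t ttes2map = (MMU_PAGE_SIZE - P2PHASE(ra, MMU_PAGE_SIZE)) >> 3;

            printf("ra 0x%llx: %llu entries fit before the page boundary\n",
                (unsigned long long)ra, (unsigned long long)ttes2map);
            return (0);
    }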
/illumos-gate/usr/src/uts/sun4u/io/px/
px_hlib.c
    1828  for (i = 0; i < pages; i++, a += MMU_PAGE_SIZE, tsb_index++) {  in hvio_iommu_map()
    1974  vmem_xfree(dvma_map, va, MMU_PAGE_SIZE);  in hvio_obptsb_detach()