Searched refs:pg_off (Results 1 – 7 of 7) sorted by relevance

/titanic_41/usr/src/uts/i86pc/vm/
i86_mmu.c
262 ulong_t pg_off; in xen_kpm_create() local
264 for (pg_off = 0; pg_off < LEVEL_SIZE(lvl); pg_off += MMU_PAGESIZE) { in xen_kpm_create()
266 kbm_read_only((uintptr_t)kpm_vbase + paddr + pg_off, in xen_kpm_create()
267 paddr + pg_off); in xen_kpm_create()
hat_i86.c
1507 pgcnt_t pg_off = mmu_btop(va - mmu.kmap_addr); in hat_kmap_load() local
1524 pte_ptr = mmu.kmap_ptes + pg_off; in hat_kmap_load()
1526 pte_ptr = (x86pte32_t *)mmu.kmap_ptes + pg_off; in hat_kmap_load()
2851 pgcnt_t pg_off; in hat_probe() local
2863 pg_off = mmu_btop(vaddr - mmu.kmap_addr); in hat_probe()
2865 return (PTE_ISVALID(mmu.kmap_ptes[pg_off])); in hat_probe()
2868 ((x86pte32_t *)mmu.kmap_ptes)[pg_off])); in hat_probe()
/titanic_41/usr/src/uts/sun4u/io/pci/
pci_dma.c
52 dvma_addr_t dvma_addr, pg_off; in pci_sc_pg_inv() local
58 pg_off = mp->dmai_offset; /* start min */ in pci_sc_pg_inv()
59 dvma_addr = MAX(off, pg_off); /* lo */ in pci_sc_pg_inv()
60 pg_off += mp->dmai_size; /* end max */ in pci_sc_pg_inv()
61 pg_off = MIN(off + len, pg_off); /* hi */ in pci_sc_pg_inv()
62 if (dvma_addr >= pg_off) { /* lo >= hi ? */ in pci_sc_pg_inv()
69 len = pg_off - dvma_addr; /* sz = hi - lo */ in pci_sc_pg_inv()
71 pg_off = dvma_addr & IOMMU_PAGE_OFFSET; /* offset in 1st pg */ in pci_sc_pg_inv()
72 len = IOMMU_BTOPR(len + pg_off); /* # of pages */ in pci_sc_pg_inv()
73 dvma_addr ^= pg_off; in pci_sc_pg_inv()
[all …]
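
The pci_sc_pg_inv() hit above first clamps the caller's [off, off + len) sync range to the mapping's [dmai_offset, dmai_offset + dmai_size) window, then converts the surviving byte range to a page count. A minimal sketch of that clamp-and-round idiom, assuming hypothetical PG_SIZE, PG_OFFSET, and BTOPR stand-ins for the driver's IOMMU_* macros:

    #include <stddef.h>
    #include <stdint.h>

    #define PG_SIZE    8192UL                          /* stand-in for IOMMU_PAGE_SIZE */
    #define PG_OFFSET  (PG_SIZE - 1)                   /* stand-in for IOMMU_PAGE_OFFSET */
    #define BTOPR(x)   (((x) + PG_OFFSET) / PG_SIZE)   /* bytes -> pages, rounded up */

    /*
     * Clip [off, off + len) against the mapping [map_off, map_off + map_size)
     * and return the number of pages spanned by the intersection, 0 if empty.
     */
    static size_t
    pages_to_invalidate(uint64_t off, size_t len, uint64_t map_off, size_t map_size)
    {
            uint64_t lo = (off > map_off) ? off : map_off;  /* lo = MAX(off, start) */
            uint64_t hi = map_off + map_size;               /* end max */

            if (off + len < hi)
                    hi = off + len;                         /* hi = MIN(off + len, end) */
            if (lo >= hi)
                    return (0);                             /* lo >= hi: nothing to do */
            return (BTOPR((hi - lo) + (lo & PG_OFFSET)));   /* size + offset in 1st pg */
    }
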
/titanic_41/usr/src/psm/stand/cpr/sparcv9/sun4u/
pages.c
98 int dtlb_index, pg_off; in mapin_buf_pages() local
105 pg_off = off & MMU_PAGEOFFSET; in mapin_buf_pages()
106 bytes = PAGE_ROUNDUP(pg_off + datalen); in mapin_buf_pages()
108 *srcp = vaddr + pg_off; in mapin_buf_pages()
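
mapin_buf_pages() above masks the file offset with MMU_PAGEOFFSET to get the offset inside the first page, rounds the mapping up to whole pages, and hands back a pointer at vaddr + pg_off. A small sketch of that masking idiom, assuming hypothetical 4 KB PAGESIZE/PAGEOFFSET constants in place of the MMU_* macros:

    #include <stdint.h>

    #define PAGESIZE        4096UL                  /* stand-in for MMU_PAGESIZE */
    #define PAGEOFFSET      (PAGESIZE - 1)          /* stand-in for MMU_PAGEOFFSET */
    #define PAGE_ROUNDUP(x) (((x) + PAGEOFFSET) & ~(uint64_t)PAGEOFFSET)

    /* Whole-page byte count needed to cover datalen bytes starting at off. */
    static uint64_t
    bytes_to_map(uint64_t off, uint64_t datalen, uint64_t *pg_offp)
    {
            *pg_offp = off & PAGEOFFSET;            /* offset within the first page */
            return (PAGE_ROUNDUP(*pg_offp + datalen));
    }
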
/titanic_41/usr/src/uts/sun4/io/px/
px_dma.c
635 ulong_t pg_off; in px_dvma_win() local
645 pg_off = mp->dmai_roffset; in px_dvma_win()
650 if (xfer_sz + pg_off - 1 > nocross) in px_dvma_win()
651 xfer_sz = nocross - pg_off + 1; in px_dvma_win()
656 xfer_sz, obj_sz, pg_off, redzone_sz, nocross); in px_dvma_win()
669 mp->dmai_winsize = P2ROUNDUP(xfer_sz + pg_off, MMU_PAGE_SIZE); in px_dvma_win()
675 obj_sz, pg_off, redzone_sz, xfer_sz); in px_dvma_win()
679 xfer_sz = MMU_PTOB(MMU_BTOP(xfer_sz + pg_off)); /* page align */ in px_dvma_win()
680 mp->dmai_size = xfer_sz - pg_off; /* 1st window xferrable size */ in px_dvma_win()
682 mp->dmai_nwin = (obj_sz + pg_off + xfer_sz - 1) / xfer_sz; in px_dvma_win()
/titanic_41/usr/src/uts/sun4v/io/px/
px_lib4v.c
645 off_t pg_off; in px_lib_dma_sync() local
670 pg_off = off & MMU_PAGEOFFSET; in px_lib_dma_sync()
673 pg_off, len); in px_lib_dma_sync()
678 len -= bytes_synced, pg_off = 0) { in px_lib_dma_sync()
680 MIN(len, MMU_PAGESIZE - pg_off); in px_lib_dma_sync()
683 pg_off, bytes_to_sync, sync_dir, &bytes_synced) != H_EOK) in px_lib_dma_sync()
688 MMU_PTOB(PX_GET_MP_PFN(mp, idx)) + pg_off, bytes_to_sync, in px_lib_dma_sync()
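
px_lib_dma_sync() above walks the buffer one MMU page at a time: the first pass covers at most MMU_PAGESIZE - pg_off bytes, and pg_off drops to 0 for every page after that. A minimal sketch of that loop shape, with a hypothetical per-page callback standing in for the hypervisor sync call:

    #include <stddef.h>
    #include <stdint.h>

    #define PAGESIZE  8192UL                        /* stand-in for MMU_PAGESIZE */

    /* Hypothetical per-page operation; returns 0 on success. */
    typedef int (*page_op_t)(size_t page_index, uint64_t pg_off, size_t bytes);

    /* Apply op to each page touched by [pg_off, pg_off + len); 0 on success. */
    static int
    for_each_page(uint64_t pg_off, size_t len, page_op_t op)
    {
            size_t idx;

            for (idx = 0; len > 0; idx++, pg_off = 0) {
                    size_t bytes = PAGESIZE - pg_off;

                    if (bytes > len)
                            bytes = len;            /* MIN(len, PAGESIZE - pg_off) */
                    if (op(idx, pg_off, bytes) != 0)
                            return (-1);
                    len -= bytes;
            }
            return (0);
    }
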
/titanic_41/usr/src/uts/common/fs/udfs/
udf_vnops.c
2500 uint32_t bflgs, u_offset_t *pg_off) in ud_page_fill() argument
2630 *pg_off = contig; in ud_page_fill()