
Searched refs:ptob (Results 1 – 25 of 96) sorted by relevance


/titanic_41/usr/src/uts/sun4u/starfire/os/
cpu_sgnblk.c
116 cvaddr = vmem_alloc(heap_arena, ptob(num_pages), VM_SLEEP); in cpu_sgn_mapin()
118 hat_devload(kas.a_hat, cvaddr, ptob(num_pages), in cpu_sgn_mapin()
175 hat_unload(kas.a_hat, (caddr_t)cvaddr, ptob(num_pages), in cpu_sgn_mapout()
177 vmem_free(heap_arena, (caddr_t)cvaddr, ptob(num_pages)); in cpu_sgn_mapout()
pda.c
579 cvaddr = vmem_alloc(heap_arena, ptob(num_pages), VM_SLEEP); in cpu_p2o_mapin()
581 hat_devload(kas.a_hat, cvaddr, ptob(num_pages), in cpu_p2o_mapin()
619 hat_unload(kas.a_hat, (caddr_t)cvaddr, ptob(num_pages), in cpu_p2o_mapout()
621 vmem_free(heap_arena, (caddr_t)cvaddr, ptob(num_pages)); in cpu_p2o_mapout()
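
The four starfire hits above (and the sbdp_cpu.c, rootnex.c, and mem_config.c hits further down) follow the same illumos kernel idiom: reserve ptob(num_pages) bytes of kernel virtual address space from heap_arena, install translations for that many bytes with hat_devload(), and undo both steps on teardown. A minimal sketch of the pattern, with hypothetical helper names (my_mapin/my_mapout) and illustrative protection and load flags:

#include <sys/types.h>
#include <sys/param.h>		/* ptob() */
#include <sys/mman.h>		/* PROT_READ, PROT_WRITE */
#include <sys/vmem.h>		/* vmem_alloc(), vmem_free(), VM_SLEEP */
#include <vm/hat.h>		/* hat_devload(), hat_unload() */
#include <vm/as.h>		/* kas */
#include <vm/seg_kmem.h>	/* heap_arena */

/* Map num_pages physical pages starting at pfn into kernel VA. */
static caddr_t
my_mapin(pfn_t pfn, pgcnt_t num_pages)
{
	caddr_t cvaddr;

	/* Reserve page-aligned kernel VA; VM_SLEEP may block until space exists. */
	cvaddr = vmem_alloc(heap_arena, ptob(num_pages), VM_SLEEP);

	/* Install locked translations covering the whole byte range. */
	hat_devload(kas.a_hat, cvaddr, ptob(num_pages), pfn,
	    PROT_READ | PROT_WRITE, HAT_LOAD_LOCK);

	return (cvaddr);
}

/* Tear the mapping down and give the VA range back to heap_arena. */
static void
my_mapout(caddr_t cvaddr, pgcnt_t num_pages)
{
	hat_unload(kas.a_hat, cvaddr, ptob(num_pages), HAT_UNLOAD_UNLOCK);
	vmem_free(heap_arena, cvaddr, ptob(num_pages));
}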
/titanic_41/usr/src/uts/i86pc/io/gfx_private/
gfxp_vm.c
118 cvaddr = vmem_alloc(heap_arena, ptob(npages), VM_NOSLEEP); in gfxp_map_kernel_space()
129 hat_devload(kas.a_hat, cvaddr, ptob(npages), pfn, in gfxp_map_kernel_space()
151 hat_unload(kas.a_hat, base, ptob(npages), HAT_UNLOAD_UNLOCK); in gfxp_unmap_kernel_space()
152 vmem_free(heap_arena, base, ptob(npages)); in gfxp_unmap_kernel_space()
gfxp_segmap.c
129 map_addr(addrp, len, ptob(pfn), 0, flags); in gfxp_ddi_segmap_setup()
/titanic_41/usr/src/uts/sun4v/vm/
mach_kpm.c
146 paddr = (caddr_t)ptob(pfn); in hat_kpm_mapin_pfn()
170 paddr = ptob(pp->p_pagenum); in hat_kpm_page2va()
311 base = ptob(pbase) + kpm_vbase; in hat_kpm_walk()
312 size = ptob(pend - pbase); in hat_kpm_walk()
/titanic_41/usr/src/uts/i86pc/os/
mp_implfuncs.c
231 cvaddr = device_arena_alloc(ptob(npages), VM_NOSLEEP); in psm_map_phys_new()
252 hat_unload(kas.a_hat, base, ptob(npages), HAT_UNLOAD_UNLOCK); in psm_unmap_phys()
253 device_arena_free(base, ptob(npages)); in psm_unmap_phys()
296 hat_unload(kas.a_hat, base, ptob(npages), HAT_UNLOAD_UNLOCK); in psm_unmap()
297 device_arena_free(base, ptob(npages)); in psm_unmap()
pmem.c
146 if (pcp == NULL || (off + len) > ptob(pcp->dp_npages)) in devmap_pmem_setup()
177 dhp->dh_roff = ptob(btop(off)); in devmap_pmem_setup()
184 dhp->dh_len = ptob(btopr(len)); in devmap_pmem_setup()
236 if (pcp == NULL || (off + len) > ptob(pcp->dp_npages)) in devmap_pmem_remap()
262 dhp->dh_roff = ptob(btop(off)); in devmap_pmem_remap()
263 dhp->dh_len = ptob(btopr(len)); in devmap_pmem_remap()
363 if ((pp = page_create_va(pcp->dp_vnp, pmem_off, ptob(i), in devmap_pmem_alloc()
406 i_ddi_decr_locked_memory(pcp->dp_proc, ptob(pcp->dp_npages)); in devmap_pmem_alloc()
496 i_ddi_decr_locked_memory(curproc, ptob(pcp->dp_npages)); in devmap_pmem_free()
560 if (i_ddi_incr_locked_memory(p, ptob(n)) != 0) { in pmem_lock()
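
The pmem.c hits show the usual normalization of a byte offset and length to page granularity before they are stored in the devmap handle: the offset is rounded down with btop(), the length rounded up with btopr(), and both are converted back to bytes with ptob(). A small sketch of that idiom (the helper name round_to_pages is made up):

#include <sys/types.h>
#include <sys/param.h>	/* ptob(), btop(), btopr() */

/*
 * Normalize a byte (offset, length) pair to whole pages the way
 * devmap_pmem_setup() does above: round the offset down and the
 * length up to page boundaries.
 */
static void
round_to_pages(offset_t off, size_t len, offset_t *roffp, size_t *rlenp)
{
	*roffp = ptob(btop(off));	/* e.g. 0x1234 -> 0x1000 with 4K pages */
	*rlenp = ptob(btopr(len));	/* e.g. 0x0234 -> 0x1000 with 4K pages */
}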
/titanic_41/usr/src/uts/common/fs/nfs/
nfs_dump.c
118 for (i = 0; i < count; i += ptod(1), addr += ptob(1)) { in nfs_dump()
235 int tsize = ptob(1); in nd_send_data()
244 if (!(dumpbuf = kmem_alloc(ptob(1), KM_NOSLEEP))) { in nd_send_data()
329 mblk_p->b_cont = esballoc((uchar_t *)dumpbuf, ptob(1), BPRI_HI, &frnop); in nd_send_data()
335 mblk_p->b_cont->b_wptr += ptob(1); in nd_send_data()
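
nfs_dump.c uses ptob(1) wherever it needs exactly one page's worth of bytes; since ptob(x) is a shift by PAGESHIFT, ptob(1) equals PAGESIZE. A trivial sketch of that use (the helper name is made up):

#include <sys/types.h>
#include <sys/param.h>	/* ptob() */
#include <sys/kmem.h>

/* Allocate a one-page scratch buffer, as nd_send_data() does above. */
static caddr_t
alloc_page_buf(void)
{
	/* ptob(1) == PAGESIZE; KM_NOSLEEP may return NULL under memory pressure. */
	return (kmem_alloc(ptob(1), KM_NOSLEEP));
}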
/titanic_41/usr/src/uts/common/sys/
ddi.h
129 #undef ptob
133 extern unsigned long ptob(unsigned long);
param.h
442 #define ptob(x) (((pgcnt_t)(x)) << PAGESHIFT) macro
444 #define ptob(x) ((x) << PAGESHIFT) macro
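
These are the defining hits: param.h carries two variants of the macro (one that widens its argument to pgcnt_t before shifting, one that is a bare shift), while ddi.h #undefs the macro and declares ptob() as a real function so DDI drivers are not bound to a compile-time page size. The conversion itself is only a left shift by PAGESHIFT (13 on sun4u/sun4v, 12 on i86pc). A self-contained userland illustration with stand-in names (MY_PAGESHIFT, my_ptob, my_btop):

#include <stdio.h>

/* Stand-alone illustration of the param.h definitions above. */
#define	MY_PAGESHIFT	13	/* 8K pages, as on sun4u/sun4v; i86pc uses 12 */
#define	my_ptob(x)	(((unsigned long)(x)) << MY_PAGESHIFT)	/* pages -> bytes */
#define	my_btop(x)	(((unsigned long)(x)) >> MY_PAGESHIFT)	/* bytes -> pages */

int
main(void)
{
	/* 3 pages -> 24576 bytes, and back again. */
	printf("%lu %lu\n", my_ptob(3), my_btop(my_ptob(3)));
	return (0);
}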
/titanic_41/usr/src/uts/sun4v/io/
dr_mem.c
736 mqp->mq.phys_pages = ptob(mq.phys_pages); in dr_mem_query()
737 mqp->mq.managed = ptob(mq.managed); in dr_mem_query()
738 mqp->mq.nonrelocatable = ptob(mq.nonrelocatable); in dr_mem_query()
739 mqp->mq.first_nonrelocatable = ptob(mq.first_nonrelocatable); in dr_mem_query()
740 mqp->mq.last_nonrelocatable = ptob(mq.last_nonrelocatable); in dr_mem_query()
968 stat->phys_pages = ptob(del_stat.phys_pages); in dr_mem_del_stat()
969 stat->managed = ptob(del_stat.managed); in dr_mem_del_stat()
970 stat->collected = ptob(del_stat.collected); in dr_mem_del_stat()
1190 if ((rv = memlist_add_span(ptob(base), ptob(npgs), &d_ml)) in mem_del()
/titanic_41/usr/src/uts/sun4v/os/
mpo.c
195 ASSERT(IS_P2ALIGNED(ptob(base), TTEBYTES(TTE256M))); \
196 ASSERT(IS_P2ALIGNED(ptob(end - base + 1), TTEBYTES(TTE256M))); \
647 stripe = ptob(mnode_pages); in lgrp_traverse()
1358 if (ptob(mnode_pages) < max_coalesce_len) { in valid_pages()
1360 "mnode slice = %lx\n", max_coalesce_len, ptob(mnode_pages)); in valid_pages()
1736 mblock[i].size = ptob(ubase - base); in mblock_update_del()
1738 mblock[i + 1].base = ptob(uend + 1); in mblock_update_del()
1739 mblock[i + 1].size = ptob(end - uend); in mblock_update_del()
1746 mblock[i].base = ptob(uend + 1); in mblock_update_del()
1747 mblock[i].size -= ptob(uend - ubase + 1); in mblock_update_del()
[all …]
memseg.c
151 if ((page_create_va(&mpvp, (u_offset_t)pp, ptob(metapgs), in memseg_alloc_meta()
203 off = (u_offset_t)ptp + ptob(metapg); in memseg_get_metapfn()
/titanic_41/usr/src/uts/common/vm/
page_retire.c
627 pa = ptob((uint64_t)page_pptonum(pp)); in page_clear_transient_ue()
817 kspmem->address = ptob(pp->p_pagenum); in pr_list_kstat_snapshot()
824 kspmem->address = ptob(pp->p_pagenum); in pr_list_kstat_snapshot()
1290 (void) page_retire(ptob(cpp->p_pagenum), PR_FMA); in page_retire_test()
1294 (void) page_retire(ptob(cpp->p_pagenum), PR_FMA); in page_retire_test()
1295 (void) page_retire(ptob(cpp2->p_pagenum), PR_FMA); in page_retire_test()
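
page_retire.c (like mach_kpm.c and move.c elsewhere in this list) applies ptob() to a page frame number rather than a page count: a pfn is the index of a physical page, so ptob(pfn) is that page's physical byte address. A hedged sketch (the helper name page_to_pa is made up):

#include <sys/types.h>
#include <sys/param.h>	/* ptob() */
#include <vm/page.h>	/* page_t, page_pptonum() */

/* Physical byte address of the memory a page_t describes. */
static uint64_t
page_to_pa(page_t *pp)
{
	/* Widen before the shift so large pfns don't overflow on 32-bit. */
	return (ptob((uint64_t)page_pptonum(pp)));
}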
seg_spt.c
447 NULL, 0, ptob(npages)); in segspt_create()
474 if (anon_resv_zone(ptob(more_pgs), zone) == 0) { in segspt_create()
485 ASSERT(amp->swresv == ptob(npages)); in segspt_create()
486 amp->swresv = amp->size = ptob(new_npgs); in segspt_create()
494 sptd->spt_realsize = ptob(npages); in segspt_create()
502 if ((err = anon_map_createpages(amp, anon_index, ptob(npages), ppa, in segspt_create()
569 sz = MIN(pgsz, ptob(npages - pidx)); in segspt_create()
592 sptd->spt_realsize = ptob(npages); in segspt_create()
1882 segspt_addr = sptseg->s_base + ptob(an_idx); in segspt_dismfault()
1884 ASSERT((segspt_addr + ptob(npages)) <= in segspt_dismfault()
[all …]
seg_dev.c
983 dhp->dh_roff < ptob(pcp->dp_npages)); in devmap_handle_reduce_len()
1502 dhp->dh_roff < ptob(pcp->dp_npages)); in segdev_faultpage()
2451 error = choose_addr(as, addrp, len, ptob(pfn), ADDR_NOVACALIGN, flags); in ddi_segmap_setup()
2539 aligned_off = (offset_t)ptob(maxdhp->dh_pfn) - offset; in devmap_device()
2544 aligned_off = (offset_t)ptob(pfn) - offset; in devmap_device()
2728 base = (ulong_t)ptob(dhp->dh_pfn); in devmap_roundup()
2887 *laddr = dhp->dh_uvaddr + ptob(pfn - dhp->dh_pfn); in devmap_get_large_pgsize()
2892 off = ptob(pfn - dhp->dh_pfn) + pgsize; in devmap_get_large_pgsize()
3352 base = (ulong_t)ptob(dhp->dh_pfn); in devmap_devmem_large_page_setup()
3451 dhp->dh_len = ptob(btopr(len)); in devmap_devmem_setup()
[all …]
vm_swap.c
295 poff = ptob(slotnumber); in swap_getoff()
298 aoff, boff, ptob(slotnumber), (long)sip->si_eoff); in swap_getoff()
335 ptob(pagenumber) + sip->si_soff); in swap_phys_free()
417 soff = ptob(btopr(lowblk << SCTRSHFT)); /* must be page aligned */ in swapdel_byname()
1200 soff = ptob(btopr(soff)); in swapadd()
1201 eoff = ptob(btop(eoff)); in swapadd()
1383 soff = ptob(btopr(lowblk << SCTRSHFT)); /* must be page aligned */ in swapdel()
/titanic_41/usr/src/uts/common/os/
mem_config.c
284 mapva = vmem_alloc(heap_arena, ptob(metapgs), VM_NOSLEEP); in kphysm_add_memory_dynamic()
309 hat_devload(kas.a_hat, vaddr, ptob(1), pfn, in kphysm_add_memory_dynamic()
313 vaddr += ptob(1); in kphysm_add_memory_dynamic()
323 hat_unload(kas.a_hat, (caddr_t)pp, ptob(metapgs), in kphysm_add_memory_dynamic()
326 vmem_free(heap_arena, mapva, ptob(metapgs)); in kphysm_add_memory_dynamic()
361 hat_unload(kas.a_hat, (caddr_t)pp, ptob(metapgs), in kphysm_add_memory_dynamic()
364 vmem_free(heap_arena, mapva, ptob(metapgs)); in kphysm_add_memory_dynamic()
445 bzero((caddr_t)pp, ptob(metapgs)); in kphysm_add_memory_dynamic()
468 hat_devload(kas.a_hat, vaddr, ptob(1), pfn, in kphysm_add_memory_dynamic()
472 vaddr += ptob(1); in kphysm_add_memory_dynamic()
[all …]
move.c
520 pa = ptob((uint64_t)hat_getpfnum(kas.a_hat, p)); in uioamove()
524 pa = ptob((uint64_t)( in uioamove()
528 pa = ptob((uint64_t)( in uioamove()
/titanic_41/usr/src/uts/common/io/
dump.c
105 size = ptob((uint64_t)physmem) / DUMP_COMPRESS_RATIO; in dump_ioctl()
133 size = ptob(dumpsize_in_pages) / DUMP_COMPRESS_RATIO; in dump_ioctl()
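
dump.c sizes the dump device by converting a page count (physmem, or the pages actually in the dump) to bytes and dividing by an expected compression ratio. A sketch of that arithmetic, with MY_COMPRESS_RATIO standing in for the real DUMP_COMPRESS_RATIO constant:

#include <sys/types.h>
#include <sys/param.h>	/* ptob() */

#define	MY_COMPRESS_RATIO	2	/* illustrative stand-in for DUMP_COMPRESS_RATIO */

/* Rough byte estimate of the space a crash dump of phys_pages pages needs. */
static uint64_t
estimate_dump_bytes(pgcnt_t phys_pages)
{
	return (ptob((uint64_t)phys_pages) / MY_COMPRESS_RATIO);
}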
/titanic_41/usr/src/uts/sun4/io/
rootnex.c
370 ptob(npages), VM_NOSLEEP)) == NULL) in rootnex_map_regspec()
376 hat_devload(kas.a_hat, kaddr, ptob(npages), pfn, in rootnex_map_regspec()
415 hat_unload(kas.a_hat, base, ptob(npages), HAT_UNLOAD_UNLOCK); in rootnex_unmap_regspec()
416 vmem_free(rootnex_regspec_arena, base, ptob(npages)); in rootnex_unmap_regspec()
868 *(ulong_t *)result = ptob(*(ulong_t *)arg); in rootnex_ctlops()
/titanic_41/usr/src/uts/common/sys/scsi/adapters/pmcs/
pmcs_param.h
63 #define PMCS_CONTROL_SIZE ptob(1)
/titanic_41/usr/src/uts/sun4u/serengeti/io/
sbdp_cpu.c
753 kaddr = vmem_alloc(heap_arena, ptob(npages), VM_NOSLEEP); in cpusram_map()
761 hat_devload(kas.a_hat, kaddr, ptob(npages), pfn, mapping_attr, in cpusram_map()
780 hat_unload(kas.a_hat, base, ptob(npages), HAT_UNLOAD_UNLOCK); in cpusram_unmap()
781 vmem_free(heap_arena, base, ptob(npages)); in cpusram_unmap()
/titanic_41/usr/src/uts/common/avs/ns/sdbc/
sd_conf.c
739 page_size = (int)ptob(1);
741 if ((fmt_page = (caddr_t)nsc_kmem_alloc(ptob(1),
750 bcopy(fmt_page, addr, ptob(1));
/titanic_41/usr/src/lib/libbc/inc/include/sun4/
param.h
129 #define ptob(x) ((x) << PAGESHIFT) macro
