
Searched refs:va (Results 1 – 25 of 254) sorted by relevance


/illumos-gate/usr/src/uts/sun4v/os/
ppage.c
101 caddr_t va; in ppmapinit() local
105 va = (caddr_t)PPMAPBASE; in ppmapinit()
116 (caddr_t)((uintptr_t)va + (nset * MMU_PAGESIZE)); in ppmapinit()
146 caddr_t va; in ppmapin() local
157 va = ppmap_vaddrs[nset]; in ppmapin()
158 if (va != NULL) { in ppmapin()
162 if (atomic_cas_ptr(&ppmap_vaddrs[nset], va, NULL) == in ppmapin()
163 va) { in ppmapin()
164 hat_memload(kas.a_hat, va, pp, in ppmapin()
167 return (va); in ppmapin()
[all …]
/illumos-gate/usr/src/uts/sun4u/os/
ppage.c
100 caddr_t va; in ppmapinit() local
104 va = (caddr_t)PPMAPBASE; in ppmapinit()
128 (caddr_t)((uintptr_t)va + (nset * setsize)); in ppmapinit()
130 va += MMU_PAGESIZE; in ppmapinit()
159 caddr_t va; in ppmapin() local
185 va = ppmap_vaddrs[index]; in ppmapin()
186 if (va != NULL) { in ppmapin()
191 va, NULL) == va) { in ppmapin()
192 hat_memload(kas.a_hat, va, pp, in ppmapin()
195 return (va); in ppmapin()
[all …]
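
Both ppmapin() variants above (sun4v and sun4u) hand out a cached mapping address by doing a compare-and-swap against the ppmap_vaddrs[] array, so only one caller can claim a given slot. Below is a minimal userland sketch of that claim pattern, assuming the illumos atomic_cas_ptr(3C) primitive from <atomic.h>; the slot array and its contents are hypothetical stand-ins, not the kernel's.

    /*
     * Sketch of the ppmapin()-style slot claim: take a non-NULL pointer out
     * of a shared array only if a compare-and-swap proves nobody beat us to
     * it.  The slots here are ordinary malloc()ed buffers, for illustration.
     */
    #include <atomic.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define NSLOTS  4

    static void *slots[NSLOTS];     /* NULL means "already claimed" */

    static void *
    claim_slot(void)
    {
        for (int i = 0; i < NSLOTS; i++) {
            void *v = slots[i];

            /* Only the CAS winner gets to return this slot. */
            if (v != NULL && atomic_cas_ptr(&slots[i], v, NULL) == v)
                return (v);
        }
        return (NULL);              /* every slot busy; caller must fall back */
    }

    int
    main(void)
    {
        for (int i = 0; i < NSLOTS; i++)
            slots[i] = malloc(64);  /* stand-ins for cached kernel VAs */

        printf("claimed slot %p\n", claim_slot());
        return (0);
    }
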
/illumos-gate/usr/src/uts/sun4u/vm/
mach_sfmmu.c
156 caddr_t va = ktsb_base; in sfmmu_remap_kernel() local
160 ASSERT(va >= datava + MMU_PAGESIZE4M); in sfmmu_remap_kernel()
163 ASSERT(IS_P2ALIGNED(va, tsbsz)); in sfmmu_remap_kernel()
167 pfn = va_to_pfn(va); in sfmmu_remap_kernel()
181 sfmmu_tteload(kas.a_hat, &tte, va, NULL, flags); in sfmmu_remap_kernel()
183 va += MMU_PAGESIZE4M; in sfmmu_remap_kernel()
200 caddr_t va; in sfmmu_clear_user_tsbs() local
204 va = utsb_vabase; in sfmmu_clear_user_tsbs()
205 end_va = va + tsb_slab_size; in sfmmu_clear_user_tsbs()
206 while (va < end_va) { in sfmmu_clear_user_tsbs()
[all …]
/illumos-gate/usr/src/uts/i86pc/vm/
i86_mmu.c
85 uintptr_t va = des_va; in va_to_pfn() local
93 if (kbm_probe(&va, &len, &pfn, &prot) == 0) in va_to_pfn()
95 if (va > des_va) in va_to_pfn()
97 if (va < des_va) in va_to_pfn()
98 pfn += mmu_btop(des_va - va); in va_to_pfn()
118 uintptr_t va; in hat_kmap_init() local
145 for (va = map_addr, i = 0; i < htable_cnt; va += LEVEL_SIZE(1), ++i) { in hat_kmap_init()
146 ht = htable_create(kas.a_hat, va, 0, NULL); in hat_kmap_init()
233 uintptr_t va = 0; in hat_kern_alloc() local
326 while (kbm_probe(&va, &size, &pfn, &prot) != 0) { in hat_kern_alloc()
[all …]
hat_kdi.c
150 kdi_vtop(uintptr_t va, uint64_t *pap) in kdi_vtop() argument
152 uintptr_t vaddr = va; in kdi_vtop()
167 if (vaddr > va) in kdi_vtop()
169 if (vaddr < va) in kdi_vtop()
170 pfn += mmu_btop(va - vaddr); in kdi_vtop()
185 index = (va >> LEVEL_SHIFT(level)) & (mmu.ptes_per_table - 1); in kdi_vtop()
202 *pap += va & LEVEL_OFFSET(level); in kdi_vtop()
212 caddr_t va; in kdi_prw() local
229 va = (caddr_t)hat_kdi_page + pgoff; in kdi_prw()
232 from = va; in kdi_prw()
[all …]
kboot_mmu.c
156 kbm_map(uintptr_t va, paddr_t pa, uint_t level, uint_t is_kernel) in kbm_map() argument
175 if (HYPERVISOR_update_va_mapping(va, pteval, in kbm_map()
184 ptep = find_pte(va, &pte_physaddr, level, 0); in kbm_map()
189 if (HYPERVISOR_update_va_mapping(va, pteval, UVMF_INVLPG | UVMF_LOCAL)) in kbm_map()
196 mmu_invlpg((caddr_t)va); in kbm_map()
206 kbm_map_ma(maddr_t ma, uintptr_t va, uint_t level) in kbm_map_ma() argument
218 if (HYPERVISOR_update_va_mapping(va, in kbm_map_ma()
226 (void) find_pte(va, &pte_physaddr, level, 0); in kbm_map_ma()
228 if (HYPERVISOR_update_va_mapping(va, in kbm_map_ma()
246 kbm_probe(uintptr_t *va, size_t *len, pfn_t *pfn, uint_t *prot) in kbm_probe() argument
[all …]
htable.h
105 #define HTABLE_HASH(hat, va, lvl) \ argument
106 ((((va) >> LEVEL_SHIFT(1)) + ((va) >> 28) + (lvl) + \
144 #define NEXT_ENTRY_VA(va, l) \ argument
145 ((va & LEVEL_MASK(l)) + LEVEL_SIZE(l) == mmu.hole_start ? \
146 mmu.hole_end : (va & LEVEL_MASK(l)) + LEVEL_SIZE(l))
156 #define NEXT_ENTRY_VA(va, l) ((va & LEVEL_MASK(l)) + LEVEL_SIZE(l)) argument
207 extern htable_t *htable_getpage(struct hat *hat, uintptr_t va, uint_t *entry);
255 extern x86pte_t htable_walk(struct hat *hat, htable_t **ht, uintptr_t *va,
263 extern uint_t htable_va2entry(uintptr_t va, htable_t *ht);
307 extern void xen_flush_va(caddr_t va);
[all …]
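
The NEXT_ENTRY_VA() macros above advance a virtual address to the next page-table entry at level l, and the non-Xen variant jumps over the VA hole rather than stepping into it. A self-contained sketch of that hole-skipping step follows; the LEVEL_* macros and hole boundaries are assumptions (4K pages, 512-entry tables, the usual x86-64 canonical-address gap) standing in for the kernel's mmu state.

    /*
     * Sketch of NEXT_ENTRY_VA(): round va down to its level-l entry, step one
     * entry forward, and if that lands exactly on the start of the VA hole,
     * resume on the far side.  All constants below are assumed, not the
     * kernel's.
     */
    #include <stdint.h>
    #include <stdio.h>

    #define LEVEL_SHIFT(l)  (12 + 9 * (l))          /* 4K pages, 512 entries */
    #define LEVEL_SIZE(l)   (1ULL << LEVEL_SHIFT(l))
    #define LEVEL_MASK(l)   (~(LEVEL_SIZE(l) - 1))

    static const uint64_t hole_start = 0x0000800000000000ULL;
    static const uint64_t hole_end   = 0xffff800000000000ULL;

    static uint64_t
    next_entry_va(uint64_t va, int l)
    {
        uint64_t next = (va & LEVEL_MASK(l)) + LEVEL_SIZE(l);

        return (next == hole_start ? hole_end : next);
    }

    int
    main(void)
    {
        /* The last 512GB slot below the hole skips straight across it. */
        uint64_t va = 0x00007f8000000000ULL;

        printf("%016llx -> %016llx\n", (unsigned long long)va,
            (unsigned long long)next_entry_va(va, 3));
        return (0);
    }
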
kboot_mmu.h
57 extern int kbm_probe(uintptr_t *va, size_t *len, pfn_t *pfn, uint_t *prot);
62 extern void kbm_map(uintptr_t va, paddr_t pa, uint_t level, uint_t is_kernel);
65 extern void kbm_map_ma(maddr_t ma, uintptr_t va, uint_t level);
71 extern void kbm_unmap(uintptr_t va);
77 extern pfn_t kbm_remap(uintptr_t va, pfn_t pfn);
82 extern void kbm_read_only(uintptr_t va, paddr_t pa);
hat_i86.c
460 uintptr_t va; in hat_alloc() local
556 for (va = rp->hkr_start_va; va != rp->hkr_end_va; in hat_alloc()
557 va += cnt * LEVEL_SIZE(rp->hkr_level)) { in hat_alloc()
562 ht = htable_create(hat, va, rp->hkr_level, in hat_alloc()
565 start = htable_va2entry(va, ht); in hat_alloc()
567 eva = va + in hat_alloc()
574 src = htable_lookup(kas.a_hat, va, rp->hkr_level); in hat_alloc()
734 hat_kernelbase(uintptr_t va) in hat_kernelbase() argument
736 if (IN_VA_HOLE(va)) in hat_kernelbase()
737 panic("_userlimit %p will fall in VA hole\n", (void *)va); in hat_kernelbase()
[all …]
htable.c
134 xen_flush_va(caddr_t va) in xen_flush_va() argument
140 mmu_flush_tlb_page((uintptr_t)va); in xen_flush_va()
143 t.arg1.linear_addr = (uintptr_t)va; in xen_flush_va()
151 xen_gflush_va(caddr_t va, cpuset_t cpus) in xen_gflush_va() argument
157 mmu_flush_tlb_page((uintptr_t)va); in xen_gflush_va()
162 t.arg1.linear_addr = (uintptr_t)va; in xen_gflush_va()
254 xen_map(uint64_t pte, caddr_t va) in xen_map() argument
256 if (HYPERVISOR_update_va_mapping((uintptr_t)va, pte, in xen_map()
442 uintptr_t va; in htable_steal_active() local
473 for (e = 0, va = ht->ht_vaddr; in htable_steal_active()
[all …]
/illumos-gate/usr/src/lib/libumem/common/
misc.c
175 va_list va; in log_message() local
177 va_start(va, format); in log_message()
178 (void) vsnprintf(buf, UMEM_MAX_ERROR_SIZE-1, format, va); in log_message()
179 va_end(va); in log_message()
195 va_list va; in debug_printf() local
197 va_start(va, format); in debug_printf()
198 (void) vsnprintf(buf, UMEM_MAX_ERROR_SIZE-1, format, va); in debug_printf()
199 va_end(va); in debug_printf()
206 umem_vprintf(const char *format, va_list va) in umem_vprintf() argument
210 (void) vsnprintf(buf, UMEM_MAX_ERROR_SIZE-1, format, va); in umem_vprintf()
[all …]
umem_fail.c
107 va_list va; in umem_panic() local
109 va_start(va, format); in umem_panic()
110 umem_vprintf(format, va); in umem_panic()
111 va_end(va); in umem_panic()
124 va_list va; in umem_err_recoverable() local
126 va_start(va, format); in umem_err_recoverable()
127 umem_vprintf(format, va); in umem_err_recoverable()
128 va_end(va); in umem_err_recoverable()
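
The libumem hits above (log_message(), debug_printf(), umem_vprintf(), umem_panic(), umem_err_recoverable()) all use the same stdarg idiom: capture the caller's arguments with va_start(), pass them to a v*printf-style routine, then release them with va_end(). A standalone sketch of that idiom, with an illustrative function name and buffer size rather than libumem's:

    #include <stdarg.h>
    #include <stdio.h>

    #define MSG_MAX 256             /* illustrative, not UMEM_MAX_ERROR_SIZE */

    /* Format into a fixed buffer, then emit it; mirrors the log_message() shape. */
    static void
    log_msg(const char *format, ...)
    {
        char buf[MSG_MAX];
        va_list va;

        va_start(va, format);
        (void) vsnprintf(buf, sizeof (buf), format, va);
        va_end(va);

        (void) fprintf(stderr, "log: %s\n", buf);
    }

    int
    main(void)
    {
        log_msg("allocation of %zu bytes failed (error %d)", (size_t)4096, 12);
        return (0);
    }
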
/illumos-gate/usr/src/uts/common/fs/nfs/
nfs_acl_srv.c
84 vattr_t va; in acl2_getacl() local
123 va.va_mask = AT_ALL; in acl2_getacl()
124 error = rfs4_delegated_getattr(vp, &va, 0, cr); in acl2_getacl()
130 error = vattr_to_nattr(&va, &resp->resok.attr); in acl2_getacl()
198 vattr_t va; in acl2_setacl() local
221 va.va_mask = AT_ALL; in acl2_setacl()
222 error = rfs4_delegated_getattr(vp, &va, 0, cr); in acl2_setacl()
229 error = vattr_to_nattr(&va, &resp->resok.attr); in acl2_setacl()
253 vattr_t va; in acl2_getattr() local
261 va.va_mask = AT_ALL; in acl2_getattr()
[all …]
nfs_srv.c
135 struct vattr va; in rfs_getattr() local
146 va.va_mask = AT_ALL; /* we want all the attributes */ in rfs_getattr()
148 error = rfs4_delegated_getattr(vp, &va, 0, cr); in rfs_getattr()
154 va.va_type = VLNK; in rfs_getattr()
156 acl_perm(vp, exi, &va, cr); in rfs_getattr()
157 error = vattr_to_nattr(&va, &ns->ns_attr); in rfs_getattr()
184 struct vattr va; in rfs_setattr() local
202 error = sattr_to_vattr(&args->saa_sa, &va); in rfs_setattr()
221 if (va.va_mask & AT_MTIME) { in rfs_setattr()
222 if (va.va_mtime.tv_nsec == 1000000000) { in rfs_setattr()
[all …]
nfs3_srv.c
111 struct vattr va; in rfs3_getattr() local
124 va.va_mask = AT_ALL; in rfs3_getattr()
125 error = rfs4_delegated_getattr(vp, &va, 0, cr); in rfs3_getattr()
130 va.va_type = VLNK; in rfs3_getattr()
133 error = vattr_to_fattr3(&va, &resp->resok.obj_attributes); in rfs3_getattr()
395 struct vattr va; in rfs3_lookup() local
564 va.va_mask = AT_ALL; in rfs3_lookup()
565 vap = rfs4_delegated_getattr(vp, &va, 0, cr) ? NULL : &va; in rfs3_lookup()
624 struct vattr va; in rfs3_access() local
660 va.va_mask = AT_MODE; in rfs3_access()
[all …]
/illumos-gate/usr/src/stand/lib/fs/nfs/
nfsops.c
371 struct vattr va; in boot_nfs_fstat() local
388 bzero((char *)&va, sizeof (va)); in boot_nfs_fstat()
389 va.va_mask = AT_TYPE | AT_SIZE | AT_MODE | AT_NODEID | in boot_nfs_fstat()
394 status = nfsgetattr(&filep->file, &va); in boot_nfs_fstat()
397 status = nfs3getattr(&filep->file, &va); in boot_nfs_fstat()
400 status = nfs4getattr(&filep->file, &va); in boot_nfs_fstat()
412 if (va.va_size > (u_offset_t)MAXOFF_T) { in boot_nfs_fstat()
416 stp->st_size = (off_t)va.va_size; in boot_nfs_fstat()
417 stp->st_mode = VTTOIF(va.va_type) | va.va_mode; in boot_nfs_fstat()
418 stp->st_atim.tv_sec = va.va_atime.tv_sec; in boot_nfs_fstat()
[all …]
/illumos-gate/usr/src/cmd/svc/svccfg/
svccfg_main.c
105 vmessage(const char *fmt, va_list va) in vmessage() argument
119 if (vfprintf(strm, fmt, va) < 0 && interactive) in vmessage()
134 va_list va; in warn() local
136 va_start(va, fmt); in warn()
137 vmessage(fmt, va); in warn()
138 va_end(va); in warn()
165 va_list va; in semerr() local
167 va_start(va, fmt); in semerr()
168 vmessage(fmt, va); in semerr()
169 va_end(va); in semerr()
/illumos-gate/usr/src/uts/i86pc/os/
ppage.c
55 caddr_t va; in ppmapin() local
57 va = vmem_alloc(heap_arena, PAGESIZE, VM_SLEEP); in ppmapin()
58 hat_memload(kas.a_hat, va, pp, vprot | HAT_NOSYNC, HAT_LOAD_LOCK); in ppmapin()
59 return (va); in ppmapin()
63 ppmapout(caddr_t va) in ppmapout() argument
65 hat_unload(kas.a_hat, va, PAGESIZE, HAT_UNLOAD_UNLOCK); in ppmapout()
66 vmem_free(heap_arena, va, PAGESIZE); in ppmapout()
/illumos-gate/usr/src/uts/i86xpv/os/
xpv_panic.c
188 uintptr_t va = *vaddr; in xpv_va_walk() local
199 if (va != lastva + MMU_PAGESIZE) in xpv_va_walk()
205 while (va < xpv_end && va >= *vaddr) { in xpv_va_walk()
223 idx = (va >> LEVEL_SHIFT(l)) & (xpv_panic_nptes[l] - 1); in xpv_va_walk()
224 scan_va = va; in xpv_va_walk()
241 va = NEXT_ENTRY_VA(va, l + 1); in xpv_va_walk()
245 va = scan_va; in xpv_va_walk()
249 if (va >= xpv_end || va < *vaddr) in xpv_va_walk()
266 (va & MMU_PAGEMASK)) { in xpv_va_walk()
267 va += MMU_PAGESIZE; in xpv_va_walk()
[all …]
/illumos-gate/usr/src/test/libc-tests/tests/err/
err.c
56 va_list va; in xtest() local
58 va_start(va, fmt); in xtest()
64 verr(exitcode, fmt, va); in xtest()
66 vwarn(fmt, va); in xtest()
71 verrc(exitcode, errcode, fmt, va); in xtest()
73 vwarnc(errcode, fmt, va); in xtest()
77 verrx(exitcode, fmt, va); in xtest()
79 vwarnx(fmt, va); in xtest()
85 va_end(va); in xtest()
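
The err.c test above exercises the v*-form <err.h> routines by forwarding a va_list into verr()/vwarn() and friends. A minimal sketch of that forwarding shape, using a hypothetical wrapper rather than the test's xtest():

    #include <err.h>
    #include <stdarg.h>

    /* Hypothetical wrapper: pass our variable arguments through to vwarnx(). */
    static void
    warn_wrap(const char *fmt, ...)
    {
        va_list va;

        va_start(va, fmt);
        vwarnx(fmt, va);            /* warnx(), but fed from a va_list */
        va_end(va);
    }

    int
    main(void)
    {
        warn_wrap("could not open %s on attempt %d", "/no/such/file", 2);
        return (0);
    }
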
/illumos-gate/usr/src/uts/sun4v/vm/
mach_sfmmu.c
203 caddr_t va = ktsb_base; in sfmmu_remap_kernel() local
207 ASSERT(va >= datava + MMU_PAGESIZE4M); in sfmmu_remap_kernel()
210 ASSERT(IS_P2ALIGNED(va, tsbsz)); in sfmmu_remap_kernel()
214 pfn = va_to_pfn(va); in sfmmu_remap_kernel()
228 sfmmu_tteload(kas.a_hat, &tte, va, NULL, flags); in sfmmu_remap_kernel()
230 va += MMU_PAGESIZE4M; in sfmmu_remap_kernel()
253 caddr_t va = ktsb_base; in sfmmu_set_tlb() local
259 (void) hv_mmu_map_perm_addr(va, KCONTEXT, tte, in sfmmu_set_tlb()
261 va += MMU_PAGESIZE4M; in sfmmu_set_tlb()
289 kdi_tlb_page_lock(caddr_t va, int do_dtlb) in kdi_tlb_page_lock() argument
[all …]
/illumos-gate/usr/src/contrib/bhyve/amd64/machine/
pmap.h
267 #define vtophys(va) pmap_kextract(((vm_offset_t) (va))) argument
351 int pmap_emulate_accessed_dirty(pmap_t pmap, vm_offset_t va, int ftype);
391 #define pmap_unmapbios(va, sz) pmap_unmapdev((va), (sz)) argument
401 void pmap_kenter(vm_offset_t va, vm_paddr_t pa);
419 void pmap_get_mapping(pmap_t pmap, vm_offset_t va, uint64_t *ptr, int *num);
426 pmap_pte_index(vm_offset_t va) in pmap_pte_index() argument
429 return ((va >> PAGE_SHIFT) & ((1ul << NPTEPGSHIFT) - 1)); in pmap_pte_index()
433 pmap_pde_index(vm_offset_t va) in pmap_pde_index() argument
436 return ((va >> PDRSHIFT) & ((1ul << NPDEPGSHIFT) - 1)); in pmap_pde_index()
440 pmap_pdpe_index(vm_offset_t va) in pmap_pdpe_index() argument
[all …]
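
The pmap_pte_index()/pmap_pde_index()/pmap_pdpe_index() helpers above each pull a 9-bit slice out of the virtual address to select an entry at one level of the x86-64 page-table hierarchy. A self-contained sketch of that arithmetic, with the usual 4K-page shift values written in as assumptions rather than taken from the header:

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed shift constants for 4K pages and 512-entry tables. */
    #define PAGE_SHIFT  12          /* 4K page */
    #define PDRSHIFT    21          /* 2M covered by one PDE */
    #define PDPSHIFT    30          /* 1G covered by one PDPE */
    #define IDX_MASK    0x1ffu      /* 9 bits: 512 entries per table */

    static unsigned pte_index(uint64_t va)  { return ((va >> PAGE_SHIFT) & IDX_MASK); }
    static unsigned pde_index(uint64_t va)  { return ((va >> PDRSHIFT) & IDX_MASK); }
    static unsigned pdpe_index(uint64_t va) { return ((va >> PDPSHIFT) & IDX_MASK); }

    int
    main(void)
    {
        uint64_t va = 0x00007f1234567000ULL;    /* arbitrary example address */

        printf("pdpe %u, pde %u, pte %u\n",
            pdpe_index(va), pde_index(va), pte_index(va));
        return (0);
    }
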
/illumos-gate/usr/src/boot/i386/btx/lib/
btxv86.h
61 #define VTOP(va) ((vm_offset_t)(va) + __base) argument
62 #define VTOPSEG(va) (u_int16_t)(VTOP((caddr_t)va) >> 4) argument
63 #define VTOPOFF(va) (u_int16_t)(VTOP((caddr_t)va) & 0xf) argument
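
The VTOP()/VTOPSEG()/VTOPOFF() macros above convert a BTX loader virtual address into a physical address by adding a relocation base, then split that into a real-mode segment:offset pair (segment = pa >> 4, offset = pa & 0xf). A sketch of the same arithmetic, with a made-up base standing in for the loader's __base symbol:

    #include <stdint.h>
    #include <stdio.h>

    static uintptr_t base = 0x9000;     /* stand-in for the loader's __base */

    #define VTOP(va)     ((uintptr_t)(va) + base)
    #define VTOPSEG(va)  ((uint16_t)(VTOP(va) >> 4))
    #define VTOPOFF(va)  ((uint16_t)(VTOP(va) & 0xf))

    int
    main(void)
    {
        uintptr_t va = 0x1234;          /* hypothetical loader address */

        printf("pa=0x%lx seg=0x%x off=0x%x\n",
            (unsigned long)VTOP(va), VTOPSEG(va), VTOPOFF(va));
        return (0);
    }
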
/illumos-gate/usr/src/uts/common/io/i40e/
i40e_osdep.c
28 mem->va = kmem_zalloc(size, KM_SLEEP); in i40e_allocate_virt_mem()
37 if (mem->va != NULL) in i40e_free_virt_mem()
38 kmem_free(mem->va, mem->size); in i40e_free_virt_mem()
81 NULL, (caddr_t *)&mem->va, &len, &mem->idm_acc_handle); in i40e_allocate_dma_mem()
84 mem->va = NULL; in i40e_allocate_dma_mem()
94 bzero(mem->va, len); in i40e_allocate_dma_mem()
96 rc = ddi_dma_addr_bind_handle(mem->idm_dma_handle, NULL, mem->va, len, in i40e_allocate_dma_mem()
104 mem->va = NULL; in i40e_allocate_dma_mem()
149 mem->va = NULL; in i40e_free_dma_mem()
161 ASSERT(mem->va == NULL); in i40e_free_dma_mem()
/illumos-gate/usr/src/uts/sun4/io/
trapstat.c
616 caddr_t va = tcpu->tcpu_vabase; in trapstat_load_tlb() local
622 for (i = 0; i < tstat_total_pages; i++, va += MMU_PAGESIZE) { in trapstat_load_tlb()
628 sfmmu_itlb_ld_kva(va, &tte); in trapstat_load_tlb()
633 sfmmu_dtlb_ld_kva(va, &tte); in trapstat_load_tlb()
642 ret = hv_mmu_map_perm_addr(va, KCONTEXT, *(uint64_t *)&tte, in trapstat_load_tlb()
655 va = tcpu->tcpu_vabase; in trapstat_load_tlb()
657 (void) hv_mmu_unmap_perm_addr(va, in trapstat_load_tlb()
659 va += MMU_PAGESIZE4M; in trapstat_load_tlb()
672 va += MMU_PAGESIZE4M; in trapstat_load_tlb()
833 caddr_t va; in trapstat_probe_alloc() local
[all …]
