
Searched refs:va (Results 1 – 25 of 248) sorted by relevance


/titanic_44/usr/src/uts/sun4v/os/
ppage.c
101 caddr_t va; in ppmapinit() local
105 va = (caddr_t)PPMAPBASE; in ppmapinit()
116 (caddr_t)((uintptr_t)va + (nset * MMU_PAGESIZE)); in ppmapinit()
146 caddr_t va; in ppmapin() local
157 va = ppmap_vaddrs[nset]; in ppmapin()
158 if (va != NULL) { in ppmapin()
162 if (atomic_cas_ptr(&ppmap_vaddrs[nset], va, NULL) == in ppmapin()
163 va) { in ppmapin()
164 hat_memload(kas.a_hat, va, pp, in ppmapin()
167 return (va); in ppmapin()
[all …]
/titanic_44/usr/src/uts/sun4u/os/
ppage.c
100 caddr_t va; in ppmapinit() local
104 va = (caddr_t)PPMAPBASE; in ppmapinit()
128 (caddr_t)((uintptr_t)va + (nset * setsize)); in ppmapinit()
130 va += MMU_PAGESIZE; in ppmapinit()
159 caddr_t va; in ppmapin() local
185 va = ppmap_vaddrs[index]; in ppmapin()
186 if (va != NULL) { in ppmapin()
191 va, NULL) == va) { in ppmapin()
192 hat_memload(kas.a_hat, va, pp, in ppmapin()
195 return (va); in ppmapin()
[all …]
/titanic_44/usr/src/uts/sun4u/vm/
mach_sfmmu.c
156 caddr_t va = ktsb_base; in sfmmu_remap_kernel() local
160 ASSERT(va >= datava + MMU_PAGESIZE4M); in sfmmu_remap_kernel()
163 ASSERT(IS_P2ALIGNED(va, tsbsz)); in sfmmu_remap_kernel()
167 pfn = va_to_pfn(va); in sfmmu_remap_kernel()
181 sfmmu_tteload(kas.a_hat, &tte, va, NULL, flags); in sfmmu_remap_kernel()
183 va += MMU_PAGESIZE4M; in sfmmu_remap_kernel()
200 caddr_t va; in sfmmu_clear_user_tsbs() local
204 va = utsb_vabase; in sfmmu_clear_user_tsbs()
205 end_va = va + tsb_slab_size; in sfmmu_clear_user_tsbs()
206 while (va < end_va) { in sfmmu_clear_user_tsbs()
[all …]
/titanic_44/usr/src/uts/i86pc/vm/
hat_kdi.c
150 kdi_vtop(uintptr_t va, uint64_t *pap) in kdi_vtop() argument
152 uintptr_t vaddr = va; in kdi_vtop()
167 if (vaddr > va) in kdi_vtop()
169 if (vaddr < va) in kdi_vtop()
170 pfn += mmu_btop(va - vaddr); in kdi_vtop()
185 index = (va >> LEVEL_SHIFT(level)) & (mmu.ptes_per_table - 1); in kdi_vtop()
202 *pap += va & LEVEL_OFFSET(level); in kdi_vtop()
212 caddr_t va; in kdi_prw() local
229 va = (caddr_t)hat_kdi_page + pgoff; in kdi_prw()
232 from = va; in kdi_prw()
[all …]
kboot_mmu.c
158 kbm_map(uintptr_t va, paddr_t pa, uint_t level, uint_t is_kernel) in kbm_map() argument
177 if (HYPERVISOR_update_va_mapping(va, pteval, in kbm_map()
186 ptep = find_pte(va, &pte_physaddr, level, 0); in kbm_map()
191 if (HYPERVISOR_update_va_mapping(va, pteval, UVMF_INVLPG | UVMF_LOCAL)) in kbm_map()
198 mmu_tlbflush_entry((caddr_t)va); in kbm_map()
208 kbm_map_ma(maddr_t ma, uintptr_t va, uint_t level) in kbm_map_ma() argument
220 if (HYPERVISOR_update_va_mapping(va, in kbm_map_ma()
228 (void) find_pte(va, &pte_physaddr, level, 0); in kbm_map_ma()
230 if (HYPERVISOR_update_va_mapping(va, in kbm_map_ma()
248 kbm_probe(uintptr_t *va, size_t *len, pfn_t *pfn, uint_t *prot) in kbm_probe() argument
[all …]
htable.h
104 #define HTABLE_HASH(hat, va, lvl) \ argument
105 ((((va) >> LEVEL_SHIFT(1)) + ((va) >> 28) + (lvl) + \
137 #define NEXT_ENTRY_VA(va, l) \ argument
138 ((va & LEVEL_MASK(l)) + LEVEL_SIZE(l) == mmu.hole_start ? \
139 mmu.hole_end : (va & LEVEL_MASK(l)) + LEVEL_SIZE(l))
149 #define NEXT_ENTRY_VA(va, l) ((va & LEVEL_MASK(l)) + LEVEL_SIZE(l)) argument
200 extern htable_t *htable_getpage(struct hat *hat, uintptr_t va, uint_t *entry);
248 extern x86pte_t htable_walk(struct hat *hat, htable_t **ht, uintptr_t *va,
256 extern uint_t htable_va2entry(uintptr_t va, htable_t *ht);
300 extern void xen_flush_va(caddr_t va);
[all …]
kboot_mmu.h
59 extern int kbm_probe(uintptr_t *va, size_t *len, pfn_t *pfn, uint_t *prot);
64 extern void kbm_map(uintptr_t va, paddr_t pa, uint_t level, uint_t is_kernel);
67 extern void kbm_map_ma(maddr_t ma, uintptr_t va, uint_t level);
73 extern void kbm_unmap(uintptr_t va);
79 extern pfn_t kbm_remap(uintptr_t va, pfn_t pfn);
84 extern void kbm_read_only(uintptr_t va, paddr_t pa);
i86_mmu.c
166 uintptr_t va = des_va; in va_to_pfn() local
174 if (kbm_probe(&va, &len, &pfn, &prot) == 0) in va_to_pfn()
176 if (va > des_va) in va_to_pfn()
178 if (va < des_va) in va_to_pfn()
179 pfn += mmu_btop(des_va - va); in va_to_pfn()
199 uintptr_t va; in hat_kmap_init() local
226 for (va = map_addr, i = 0; i < htable_cnt; va += LEVEL_SIZE(1), ++i) { in hat_kmap_init()
227 ht = htable_create(kas.a_hat, va, 0, NULL); in hat_kmap_init()
314 uintptr_t va = 0; in hat_kern_alloc() local
407 while (kbm_probe(&va, &size, &pfn, &prot) != 0) { in hat_kern_alloc()
[all …]
hat_i86.c
250 uintptr_t va; in hat_alloc() local
318 for (va = rp->hkr_start_va; va != rp->hkr_end_va; in hat_alloc()
319 va += cnt * LEVEL_SIZE(rp->hkr_level)) { in hat_alloc()
324 ht = htable_create(hat, va, rp->hkr_level, in hat_alloc()
327 start = htable_va2entry(va, ht); in hat_alloc()
329 eva = va + in hat_alloc()
344 src = htable_lookup(kas.a_hat, va, rp->hkr_level); in hat_alloc()
476 hat_kernelbase(uintptr_t va) in hat_kernelbase() argument
479 va &= LEVEL_MASK(1); in hat_kernelbase()
481 if (IN_VA_HOLE(va)) in hat_kernelbase()
[all …]
htable.c
134 xen_flush_va(caddr_t va) in xen_flush_va() argument
140 mmu_tlbflush_entry((caddr_t)va); in xen_flush_va()
143 t.arg1.linear_addr = (uintptr_t)va; in xen_flush_va()
151 xen_gflush_va(caddr_t va, cpuset_t cpus) in xen_gflush_va() argument
157 mmu_tlbflush_entry((caddr_t)va); in xen_gflush_va()
162 t.arg1.linear_addr = (uintptr_t)va; in xen_gflush_va()
254 xen_map(uint64_t pte, caddr_t va) in xen_map() argument
256 if (HYPERVISOR_update_va_mapping((uintptr_t)va, pte, in xen_map()
442 uintptr_t va; in htable_steal_active() local
473 for (e = 0, va = ht->ht_vaddr; in htable_steal_active()
[all …]
/titanic_44/usr/src/lib/libumem/common/
misc.c
177 va_list va; in log_message() local
179 va_start(va, format); in log_message()
180 (void) vsnprintf(buf, UMEM_MAX_ERROR_SIZE-1, format, va); in log_message()
181 va_end(va); in log_message()
197 va_list va; in debug_printf() local
199 va_start(va, format); in debug_printf()
200 (void) vsnprintf(buf, UMEM_MAX_ERROR_SIZE-1, format, va); in debug_printf()
201 va_end(va); in debug_printf()
208 umem_vprintf(const char *format, va_list va) in umem_vprintf() argument
212 (void) vsnprintf(buf, UMEM_MAX_ERROR_SIZE-1, format, va); in umem_vprintf()
[all …]
umem_fail.c
107 va_list va; in umem_panic() local
109 va_start(va, format); in umem_panic()
110 umem_vprintf(format, va); in umem_panic()
111 va_end(va); in umem_panic()
124 va_list va; in umem_err_recoverable() local
126 va_start(va, format); in umem_err_recoverable()
127 umem_vprintf(format, va); in umem_err_recoverable()
128 va_end(va); in umem_err_recoverable()
/titanic_44/usr/src/uts/common/fs/nfs/
nfs_acl_srv.c
84 vattr_t va; in acl2_getacl() local
123 va.va_mask = AT_ALL; in acl2_getacl()
124 error = rfs4_delegated_getattr(vp, &va, 0, cr); in acl2_getacl()
130 error = vattr_to_nattr(&va, &resp->resok.attr); in acl2_getacl()
198 vattr_t va; in acl2_setacl() local
221 va.va_mask = AT_ALL; in acl2_setacl()
222 error = rfs4_delegated_getattr(vp, &va, 0, cr); in acl2_setacl()
229 error = vattr_to_nattr(&va, &resp->resok.attr); in acl2_setacl()
253 vattr_t va; in acl2_getattr() local
261 va.va_mask = AT_ALL; in acl2_getattr()
[all …]
nfs_srv.c
103 struct vattr va; in rfs_getattr() local
114 va.va_mask = AT_ALL; /* we want all the attributes */ in rfs_getattr()
116 error = rfs4_delegated_getattr(vp, &va, 0, cr); in rfs_getattr()
122 va.va_type = VLNK; in rfs_getattr()
124 acl_perm(vp, exi, &va, cr); in rfs_getattr()
125 error = vattr_to_nattr(&va, &ns->ns_attr); in rfs_getattr()
152 struct vattr va; in rfs_setattr() local
170 error = sattr_to_vattr(&args->saa_sa, &va); in rfs_setattr()
189 if (va.va_mask & AT_MTIME) { in rfs_setattr()
190 if (va.va_mtime.tv_nsec == 1000000000) { in rfs_setattr()
[all …]
nfs3_srv.c
96 struct vattr va; in rfs3_getattr() local
108 va.va_mask = AT_ALL; in rfs3_getattr()
109 error = rfs4_delegated_getattr(vp, &va, 0, cr); in rfs3_getattr()
114 va.va_type = VLNK; in rfs3_getattr()
117 error = vattr_to_fattr3(&va, &resp->resok.obj_attributes); in rfs3_getattr()
374 struct vattr va; in rfs3_lookup() local
529 va.va_mask = AT_ALL; in rfs3_lookup()
530 vap = rfs4_delegated_getattr(vp, &va, 0, cr) ? NULL : &va; in rfs3_lookup()
583 struct vattr va; in rfs3_access() local
618 va.va_mask = AT_MODE; in rfs3_access()
[all …]
/titanic_44/usr/src/stand/lib/fs/nfs/
nfsops.c
371 struct vattr va; in boot_nfs_fstat() local
388 bzero((char *)&va, sizeof (va)); in boot_nfs_fstat()
389 va.va_mask = AT_TYPE | AT_SIZE | AT_MODE | AT_NODEID | in boot_nfs_fstat()
394 status = nfsgetattr(&filep->file, &va); in boot_nfs_fstat()
397 status = nfs3getattr(&filep->file, &va); in boot_nfs_fstat()
400 status = nfs4getattr(&filep->file, &va); in boot_nfs_fstat()
412 if (va.va_size > (u_offset_t)MAXOFF_T) { in boot_nfs_fstat()
416 stp->st_size = (off_t)va.va_size; in boot_nfs_fstat()
417 stp->st_mode = VTTOIF(va.va_type) | va.va_mode; in boot_nfs_fstat()
418 stp->st_atim.tv_sec = va.va_atime.tv_sec; in boot_nfs_fstat()
[all …]
/titanic_44/usr/src/cmd/svc/svccfg/
svccfg_main.c
97 vmessage(const char *fmt, va_list va) in vmessage() argument
111 if (vfprintf(strm, fmt, va) < 0 && interactive) in vmessage()
126 va_list va; in warn() local
128 va_start(va, fmt); in warn()
129 vmessage(fmt, va); in warn()
130 va_end(va); in warn()
157 va_list va; in semerr() local
159 va_start(va, fmt); in semerr()
160 vmessage(fmt, va); in semerr()
161 va_end(va); in semerr()
/titanic_44/usr/src/uts/i86pc/os/
ppage.c
57 caddr_t va; in ppmapin() local
59 va = vmem_alloc(heap_arena, PAGESIZE, VM_SLEEP); in ppmapin()
60 hat_memload(kas.a_hat, va, pp, vprot | HAT_NOSYNC, HAT_LOAD_LOCK); in ppmapin()
61 return (va); in ppmapin()
65 ppmapout(caddr_t va) in ppmapout() argument
67 hat_unload(kas.a_hat, va, PAGESIZE, HAT_UNLOAD_UNLOCK); in ppmapout()
68 vmem_free(heap_arena, va, PAGESIZE); in ppmapout()
/titanic_44/usr/src/uts/i86xpv/os/
xpv_panic.c
189 uintptr_t va = *vaddr; in xpv_va_walk() local
199 if (va != lastva + MMU_PAGESIZE) in xpv_va_walk()
205 while (va < xpv_end && va >= *vaddr) { in xpv_va_walk()
223 idx = (va >> LEVEL_SHIFT(l)) & (xpv_panic_nptes[l] - 1); in xpv_va_walk()
224 scan_va = va; in xpv_va_walk()
241 va = NEXT_ENTRY_VA(va, l + 1); in xpv_va_walk()
245 va = scan_va; in xpv_va_walk()
249 if (va >= xpv_end || va < *vaddr) in xpv_va_walk()
266 (va & MMU_PAGEMASK)) { in xpv_va_walk()
267 va += MMU_PAGESIZE; in xpv_va_walk()
[all …]
/titanic_44/usr/src/uts/sun4v/vm/
mach_sfmmu.c
203 caddr_t va = ktsb_base; in sfmmu_remap_kernel() local
207 ASSERT(va >= datava + MMU_PAGESIZE4M); in sfmmu_remap_kernel()
210 ASSERT(IS_P2ALIGNED(va, tsbsz)); in sfmmu_remap_kernel()
214 pfn = va_to_pfn(va); in sfmmu_remap_kernel()
228 sfmmu_tteload(kas.a_hat, &tte, va, NULL, flags); in sfmmu_remap_kernel()
230 va += MMU_PAGESIZE4M; in sfmmu_remap_kernel()
253 caddr_t va = ktsb_base; in sfmmu_set_tlb() local
259 (void) hv_mmu_map_perm_addr(va, KCONTEXT, tte, in sfmmu_set_tlb()
261 va += MMU_PAGESIZE4M; in sfmmu_set_tlb()
289 kdi_tlb_page_lock(caddr_t va, int do_dtlb) in kdi_tlb_page_lock() argument
[all …]
/titanic_44/usr/src/uts/common/io/i40e/
i40e_osdep.c
28 mem->va = kmem_zalloc(size, KM_SLEEP); in i40e_allocate_virt_mem()
37 if (mem->va != NULL) in i40e_free_virt_mem()
38 kmem_free(mem->va, mem->size); in i40e_free_virt_mem()
81 NULL, (caddr_t *)&mem->va, &len, &mem->idm_acc_handle); in i40e_allocate_dma_mem()
84 mem->va = NULL; in i40e_allocate_dma_mem()
94 bzero(mem->va, len); in i40e_allocate_dma_mem()
96 rc = ddi_dma_addr_bind_handle(mem->idm_dma_handle, NULL, mem->va, len, in i40e_allocate_dma_mem()
104 mem->va = NULL; in i40e_allocate_dma_mem()
149 mem->va = NULL; in i40e_free_dma_mem()
161 ASSERT(mem->va == NULL); in i40e_free_dma_mem()
/titanic_44/usr/src/uts/i86pc/boot/
boot_mmu.c
60 vatoindex(uint64_t va, uint_t level) in vatoindex() argument
62 return ((va >> shift_amt[level]) & (ptes_per_table - 1)); in vatoindex()
70 find_pte(uint64_t va, paddr_t *pa, uint_t level, uint_t probe_only) in find_pte() argument
80 if (IN_HYPERVISOR_VA(va)) in find_pte()
92 index = vatoindex(va, l); in find_pte()
120 index = vatoindex(va, l); in find_pte()
/titanic_44/usr/src/lib/pyzfs/common/
table.py
44 va = list()
47 va.append(v)
49 self.lines.append((sortkey, va))
57 for (k, va) in self.lines:
61 line += va[i]
69 line += fmt % (mfl, va[i])
/titanic_44/usr/src/uts/sun4/io/
trapstat.c
616 caddr_t va = tcpu->tcpu_vabase; in trapstat_load_tlb() local
622 for (i = 0; i < tstat_total_pages; i++, va += MMU_PAGESIZE) { in trapstat_load_tlb()
628 sfmmu_itlb_ld_kva(va, &tte); in trapstat_load_tlb()
633 sfmmu_dtlb_ld_kva(va, &tte); in trapstat_load_tlb()
642 ret = hv_mmu_map_perm_addr(va, KCONTEXT, *(uint64_t *)&tte, in trapstat_load_tlb()
655 va = tcpu->tcpu_vabase; in trapstat_load_tlb()
657 (void) hv_mmu_unmap_perm_addr(va, in trapstat_load_tlb()
659 va += MMU_PAGESIZE4M; in trapstat_load_tlb()
672 va += MMU_PAGESIZE4M; in trapstat_load_tlb()
833 caddr_t va; in trapstat_probe_alloc() local
[all …]
/titanic_44/usr/src/uts/sun4u/io/pci/
pci_reloc.c
97 pci_dvma_postrelocator(caddr_t va, uint_t len, uint_t flags, void *mpvoid, in pci_dvma_postrelocator() argument
134 offset = va - baseva; in pci_dvma_postrelocator()
139 index, (int64_t)va, (int64_t)baseva); in pci_dvma_postrelocator()
168 pci_dma_relocerr(caddr_t va, uint_t len, uint_t errorcode, void *mpvoid) in pci_dma_relocerr() argument
173 va, mpvoid); in pci_dma_relocerr()
260 pci_fdvma_prerelocator(caddr_t va, uint_t len, uint_t flags, void *mpvoid) in pci_fdvma_prerelocator() argument
274 if (va >= baseva && va < endva) in pci_fdvma_prerelocator()
281 pci_fdvma_postrelocator(caddr_t va, uint_t len, uint_t flags, void *mpvoid, in pci_fdvma_postrelocator() argument
324 if (va >= baseva && va < endva) { in pci_fdvma_postrelocator()
325 index = i + IOMMU_BTOP(va - baseva); in pci_fdvma_postrelocator()
[all …]
