Searched refs:ptes (Results 1 – 25 of 43) sorted by relevance

/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmmnv44.c
28 dma_addr_t *list, u32 ptei, u32 ptes) in nv44_vmm_pgt_fill() argument
38 while (ptes--) { in nv44_vmm_pgt_fill()
74 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv44_vmm_pgt_pte() argument
79 const u32 pten = min(ptes, 4 - (ptei & 3)); in nv44_vmm_pgt_pte()
84 ptes -= pten; in nv44_vmm_pgt_pte()
87 while (ptes >= 4) { in nv44_vmm_pgt_pte()
94 ptes -= 4; in nv44_vmm_pgt_pte()
97 if (ptes) { in nv44_vmm_pgt_pte()
98 for (i = 0; i < ptes; i++, addr += 0x1000) in nv44_vmm_pgt_pte()
100 nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, ptes); in nv44_vmm_pgt_pte()
[all …]
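
Aside on the nv44 hits above: this layout packs four 32-bit PTEs into each naturally aligned group, so a write is split into an unaligned head and tail (routed through nv44_vmm_pgt_fill()) and whole four-entry groups in between. A minimal standalone sketch of that head/body/tail split; the table, the PTE value, and the helpers are illustrative stand-ins, not the nouveau API:

#include <stdint.h>
#include <stdio.h>

#define NPTE 32
static uint32_t pt[NPTE];    /* stand-in for the 4-PTE-per-group table */

static void write_pte(uint32_t i) { pt[i] = 0xdeadbeef; }    /* fake PTE value */

static void write_ptes(uint32_t ptei, uint32_t ptes)
{
    /* Head: single entries up to the next 4-entry group boundary. */
    while ((ptei & 3) && ptes) {
        write_pte(ptei++);
        ptes--;
    }
    /* Body: whole aligned groups of four, one burst each in hardware. */
    while (ptes >= 4) {
        for (int i = 0; i < 4; i++)
            write_pte(ptei + i);
        ptei += 4;
        ptes -= 4;
    }
    /* Tail: leftovers sharing a group with untouched neighbours. */
    while (ptes--)
        write_pte(ptei++);
}

int main(void)
{
    write_ptes(2, 9);    /* unaligned start, odd length */
    for (int i = 0; i < 16; i++)
        printf("%2d: %08x\n", i, pt[i]);
    return 0;
}
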
vmmnv41.c
28 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv41_vmm_pgt_pte() argument
31 while (ptes--) { in nv41_vmm_pgt_pte()
39 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv41_vmm_pgt_sgl() argument
41 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_sgl()
46 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv41_vmm_pgt_dma() argument
50 while (ptes--) { in nv41_vmm_pgt_dma()
56 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_dma()
62 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv41_vmm_pgt_unmap() argument
64 VMM_FO032(pt, vmm, ptei * 4, 0, ptes); in nv41_vmm_pgt_unmap()
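
The nv41 hits (and the nv04 ones below) share one shape: a VMM_MAP_ITER_* macro walks the SGL or DMA address list and hands each run to a callback that writes one 32-bit PTE per page, while unmap is a zero-fill over the same range. A hedged userspace sketch of that iterate-and-write structure, with simplified stand-ins for VMM_WO032()/VMM_FO032() and a made-up PTE encoding:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t pt[64];    /* stand-in for the GPU page table */

/* Simplified VMM_WO032(): write one 32-bit PTE at a byte offset. */
static void wo32(uint32_t byte_off, uint32_t data) { pt[byte_off / 4] = data; }

/* Callback shape of nv41_vmm_pgt_pte(): 'ptes' entries starting at 'addr'. */
static void pgt_pte(uint32_t ptei, uint32_t ptes, uint64_t addr)
{
    while (ptes--) {
        wo32(ptei++ * 4, (uint32_t)(addr >> 7) | 1);    /* made-up encoding */
        addr += 0x1000;
    }
}

/* Simplified VMM_MAP_ITER_DMA: hand each DMA page to the callback. */
static void map_dma(uint32_t ptei, const uint64_t *dma, size_t n)
{
    for (size_t i = 0; i < n; i++)
        pgt_pte(ptei + (uint32_t)i, 1, dma[i]);
}

/* Mirror of nv41_vmm_pgt_unmap(): zero-fill the range. */
static void unmap(uint32_t ptei, uint32_t ptes)
{
    while (ptes--)
        wo32(ptei++ * 4, 0);
}

int main(void)
{
    const uint64_t pages[] = { 0x10000, 0x23000, 0x8000 };

    map_dma(4, pages, 3);
    unmap(4, 2);
    for (int i = 0; i < 8; i++)
        printf("PTE %d: %08x\n", i, pt[i]);
    return 0;
}
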
vmmnv04.c
29 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv04_vmm_pgt_pte() argument
32 while (ptes--) { in nv04_vmm_pgt_pte()
40 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv04_vmm_pgt_sgl() argument
42 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_sgl()
47 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv04_vmm_pgt_dma() argument
51 while (ptes--) in nv04_vmm_pgt_dma()
55 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_dma()
61 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv04_vmm_pgt_unmap() argument
63 VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes); in nv04_vmm_pgt_unmap()
vmmnv50.c
33 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv50_vmm_pgt_pte() argument
39 map->type += ptes * map->ctag; in nv50_vmm_pgt_pte()
41 while (ptes) { in nv50_vmm_pgt_pte()
44 if (ptes >= pten && IS_ALIGNED(ptei, pten)) in nv50_vmm_pgt_pte()
50 ptes -= pten; in nv50_vmm_pgt_pte()
59 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv50_vmm_pgt_sgl() argument
61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); in nv50_vmm_pgt_sgl()
66 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv50_vmm_pgt_dma() argument
69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in nv50_vmm_pgt_dma()
71 while (ptes--) { in nv50_vmm_pgt_dma()
[all …]
vmm.c
198 const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes) in nvkm_vmm_unref_sptes() argument
209 for (lpti = ptei >> sptb; ptes; spti = 0, lpti++) { in nvkm_vmm_unref_sptes()
210 const u32 pten = min(sptn - spti, ptes); in nvkm_vmm_unref_sptes()
212 ptes -= pten; in nvkm_vmm_unref_sptes()
222 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) { in nvkm_vmm_unref_sptes()
236 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) { in nvkm_vmm_unref_sptes()
243 TRA(it, "LPTE %05x: U -> S %d PTEs", pteb, ptes); in nvkm_vmm_unref_sptes()
244 pair->func->sparse(vmm, pgt->pt[0], pteb, ptes); in nvkm_vmm_unref_sptes()
251 TRA(it, "LPTE %05x: U -> I %d PTEs", pteb, ptes); in nvkm_vmm_unref_sptes()
252 pair->func->invalid(vmm, pgt->pt[0], pteb, ptes); in nvkm_vmm_unref_sptes()
[all …]
vmmgf100.c
33 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in gf100_vmm_pgt_pte() argument
39 while (ptes--) { in gf100_vmm_pgt_pte()
48 map->type += ptes * map->ctag; in gf100_vmm_pgt_pte()
50 while (ptes--) { in gf100_vmm_pgt_pte()
59 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gf100_vmm_pgt_sgl() argument
61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_sgl()
66 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gf100_vmm_pgt_dma() argument
69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in gf100_vmm_pgt_dma()
71 while (ptes--) { in gf100_vmm_pgt_dma()
80 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_dma()
[all …]
vmmgk104.c
26 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gk104_vmm_lpt_invalid() argument
29 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(1) /* PRIV. */, ptes); in gk104_vmm_lpt_invalid()
vmmgm200.c
29 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gm200_vmm_pgt_sparse() argument
32 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes); in gm200_vmm_pgt_sparse()
/linux/arch/x86/xen/
grant-table.c
27 pte_t **ptes; member
45 set_pte_at(&init_mm, addr, gnttab_shared_vm_area.ptes[i], in arch_gnttab_map_shared()
67 set_pte_at(&init_mm, addr, gnttab_status_vm_area.ptes[i], in arch_gnttab_map_status()
77 pte_t **ptes; in arch_gnttab_unmap() local
82 ptes = gnttab_status_vm_area.ptes; in arch_gnttab_unmap()
84 ptes = gnttab_shared_vm_area.ptes; in arch_gnttab_unmap()
89 set_pte_at(&init_mm, addr, ptes[i], __pte(0)); in arch_gnttab_unmap()
98 area->ptes[area->idx++] = pte; in gnttab_apply()
104 area->ptes = kmalloc_objs(*area->ptes, nr_frames); in arch_gnttab_valloc()
105 if (area->ptes == NULL) in arch_gnttab_valloc()
[all …]
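
The grant-table hits show a capture-then-reuse pattern: the pte_t * backing every frame of the VM area is recorded once, so later map/unmap calls are a direct set_pte_at()/__pte(0) with no page-table walk. A hedged kernel-style sketch of the capture step, assuming the usual get_vm_area() plus apply_to_page_range() contract (the struct and function names here are illustrative):

#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>

/* Illustrative mirror of the gnttab VM-area bookkeeping. */
struct pte_cache {
    struct vm_struct *area;
    pte_t **ptes;
    int idx;
};

/* apply_to_page_range() callback: record the slot, don't touch it yet. */
static int cache_pte(pte_t *pte, unsigned long addr, void *data)
{
    struct pte_cache *c = data;

    c->ptes[c->idx++] = pte;
    return 0;
}

/* Reserve VA space once and remember the PTE slot behind every frame. */
static int pte_cache_alloc(struct pte_cache *c, unsigned int nr_frames)
{
    c->ptes = kcalloc(nr_frames, sizeof(*c->ptes), GFP_KERNEL);
    if (!c->ptes)
        return -ENOMEM;

    c->area = get_vm_area(nr_frames * PAGE_SIZE, 0);
    if (!c->area) {
        kfree(c->ptes);
        return -ENOMEM;
    }

    c->idx = 0;
    return apply_to_page_range(&init_mm, (unsigned long)c->area->addr,
                               nr_frames * PAGE_SIZE, cache_pte, c);
}

With the array filled, mapping frame i is a set_pte_at(&init_mm, addr, c->ptes[i], ...) and unmapping writes __pte(0) through the same slot, as the arch_gnttab_map_shared()/arch_gnttab_unmap() hits show.
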
/linux/block/partitions/
efi.c
335 gpt_header **gpt, gpt_entry **ptes) in is_gpt_valid() argument
340 if (!ptes) in is_gpt_valid()
429 if (!(*ptes = alloc_read_gpt_entries(state, *gpt))) in is_gpt_valid()
433 crc = efi_crc32((const unsigned char *) (*ptes), pt_size); in is_gpt_valid()
444 kfree(*ptes); in is_gpt_valid()
445 *ptes = NULL; in is_gpt_valid()
581 gpt_entry **ptes) in find_valid_gpt() argument
592 if (!ptes) in find_valid_gpt()
642 *ptes = pptes; in find_valid_gpt()
651 *ptes = aptes; in find_valid_gpt()
[all …]
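
In is_gpt_valid(), the entry array only reaches the caller once its CRC32 matches the header field; on mismatch the buffer is freed and the out-pointer NULLed, which is what lets find_valid_gpt() fall back from the primary to the alternate table. A hedged userspace sketch of that validate-or-discard step, using zlib's crc32() as a stand-in for efi_crc32() (both compute the standard CRC-32) and an abbreviated entry layout:

#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <zlib.h>

struct gpt_entry { uint8_t raw[128]; };    /* abbreviated on-disk entry */

/*
 * Validate-or-discard, mirroring is_gpt_valid(): hand back the entry
 * array only if its CRC matches the value recorded in the GPT header.
 */
static struct gpt_entry *read_entries_checked(const void *disk_entries,
                                              size_t pt_size,
                                              uint32_t header_crc)
{
    struct gpt_entry *ptes = malloc(pt_size);

    if (!ptes)
        return NULL;
    memcpy(ptes, disk_entries, pt_size);

    if ((uint32_t)crc32(0L, (const unsigned char *)ptes, (uInt)pt_size)
        != header_crc) {
        free(ptes);    /* bad table; the caller can try the alternate GPT */
        return NULL;
    }
    return ptes;
}
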
/linux/arch/alpha/kernel/
pci_iommu.c
76 arena->ptes = memblock_alloc_or_panic(mem_size, align); in iommu_arena_new_node()
103 unsigned long *ptes; in iommu_arena_find_pages() local
113 ptes = arena->ptes; in iommu_arena_find_pages()
125 if (ptes[p+i]) { in iommu_arena_find_pages()
159 unsigned long *ptes; in iommu_arena_alloc() local
165 ptes = arena->ptes; in iommu_arena_alloc()
178 ptes[p+i] = IOMMU_INVALID_PTE; in iommu_arena_alloc()
192 p = arena->ptes + ofs; in iommu_arena_free()
288 arena->ptes[i + dma_ofs] = mk_iommu_pte(paddr); in pci_map_single_1()
532 unsigned long *ptes; in sg_fill() local
[all …]
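
The Alpha arena here is a flat array doubling as allocator state: a slot holding 0 is free, IOMMU_INVALID_PTE marks it reserved-but-unmapped, and a real PTE (mk_iommu_pte()) marks it live. A minimal sketch of the first-fit find-then-claim scan; the sizes and the marker value are illustrative:

#include <stdint.h>
#include <stdio.h>

#define ARENA_SLOTS    64
#define INVALID_PTE    1UL    /* reserved-but-unmapped marker, illustrative */

static unsigned long ptes[ARENA_SLOTS];    /* flat arena: 0 means free */

/* First-fit, like iommu_arena_find_pages(): n consecutive free slots. */
static long arena_find(unsigned long n)
{
    unsigned long p = 0, i;

    while (p + n <= ARENA_SLOTS) {
        for (i = 0; i < n && !ptes[p + i]; i++)
            ;
        if (i == n)
            return (long)p;
        p += i + 1;    /* restart just past the busy slot */
    }
    return -1;
}

/* Claim the run so it can't be handed out twice, cf. iommu_arena_alloc(). */
static long arena_alloc(unsigned long n)
{
    long p = arena_find(n);

    if (p >= 0)
        for (unsigned long i = 0; i < n; i++)
            ptes[p + i] = INVALID_PTE;
    return p;
}

int main(void)
{
    long a = arena_alloc(4), b = arena_alloc(2);

    /* A real mapping then overwrites each slot, cf. mk_iommu_pte(paddr). */
    printf("runs at %ld and %ld\n", a, b);
    return 0;
}
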
core_titan.c
328 port->tba[0].csr = virt_to_phys(hose->sg_isa->ptes); in titan_init_one_pachip_port()
336 port->tba[2].csr = virt_to_phys(hose->sg_pci->ptes); in titan_init_one_pachip_port()
463 unsigned long *ptes; in titan_ioremap() local
518 ptes = hose->sg_pci->ptes; in titan_ioremap()
522 pfn = ptes[baddr >> PAGE_SHIFT]; in titan_ioremap()
711 pte = aper->arena->ptes[baddr >> PAGE_SHIFT]; in titan_agp_translate()
core_marvel.c
292 csrs->POx_TBASE[0].csr = virt_to_phys(hose->sg_isa->ptes); in io7_init_hose()
309 csrs->POx_TBASE[2].csr = virt_to_phys(hose->sg_pci->ptes); in io7_init_hose()
687 unsigned long *ptes; in marvel_ioremap() local
742 ptes = hose->sg_pci->ptes; in marvel_ioremap()
746 pfn = ptes[baddr >> PAGE_SHIFT]; in marvel_ioremap()
1000 pte = aper->arena->ptes[baddr >> PAGE_SHIFT]; in marvel_agp_translate()
pci_impl.h
139 unsigned long *ptes; member
core_tsunami.c
337 pchip->tba[0].csr = virt_to_phys(hose->sg_isa->ptes); in tsunami_init_one_pchip()
341 pchip->tba[1].csr = virt_to_phys(hose->sg_pci->ptes); in tsunami_init_one_pchip()
/linux/arch/powerpc/mm/ptdump/
hashpagetable.c
250 } ptes[4]; in pseries_find() local
268 lpar_rc = plpar_pte_read_4(0, hpte_group, (void *)ptes); in pseries_find()
273 if (HPTE_V_COMPARE(ptes[j].v, want_v) && in pseries_find()
274 (ptes[j].v & HPTE_V_VALID)) { in pseries_find()
276 *v = ptes[j].v; in pseries_find()
277 *r = ptes[j].r; in pseries_find()
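
pseries_find() fetches hash PTEs four at a time with the plpar_pte_read_4 hcall, then scans the group for an entry that is valid and matches the wanted 'v' word. A hedged sketch of that group scan; the mask here is a simplification of HPTE_V_COMPARE():

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define HPTE_V_VALID    0x1ull
#define GROUP_SIZE      4

struct hpte { uint64_t v, r; };

/*
 * Scan one hash group for a valid entry whose 'v' word matches want_v;
 * ignoring the low flag bits stands in for HPTE_V_COMPARE().
 */
static bool group_find(const struct hpte ptes[GROUP_SIZE], uint64_t want_v,
                       uint64_t *v, uint64_t *r)
{
    for (int j = 0; j < GROUP_SIZE; j++) {
        if (((ptes[j].v ^ want_v) & ~0x7full) == 0 &&
            (ptes[j].v & HPTE_V_VALID)) {
            *v = ptes[j].v;
            *r = ptes[j].r;
            return true;
        }
    }
    return false;
}

int main(void)
{
    struct hpte group[GROUP_SIZE] = {
        { 0x1200, 0 }, { 0x4581, 0xabc }, { 0, 0 }, { 0, 0 },
    };
    uint64_t v, r;

    printf("found: %d\n", group_find(group, 0x4580, &v, &r));
    return 0;
}
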
/linux/drivers/gpu/drm/xe/
xe_sriov_vf_ccs.c
123 u64 sys_mem_size, ccs_mem_size, ptes, bb_pool_size; in get_ccs_bb_pool_size() local
129 ptes = DIV_ROUND_UP_ULL(sys_mem_size + ccs_mem_size, XE_PAGE_SIZE); in get_ccs_bb_pool_size()
137 bb_pool_size = ptes * sizeof(u32); in get_ccs_bb_pool_size()
xe_migrate.c
609 u32 ptes; in emit_pte() local
620 ptes = DIV_ROUND_UP(size, XE_PAGE_SIZE); in emit_pte()
622 while (ptes) { in emit_pte()
623 u32 chunk = min(MAX_PTE_PER_SDI, ptes); in emit_pte()
631 ptes -= chunk; in emit_pte()
1806 u32 ptes, ofs; in __xe_migrate_update_pgtables() local
1835 ptes = num_updates; in __xe_migrate_update_pgtables()
1837 while (ptes) { in __xe_migrate_update_pgtables()
1838 u32 chunk = min(MAX_PTE_PER_SDI, ptes); in __xe_migrate_update_pgtables()
1874 ptes -= chunk; in __xe_migrate_update_pgtables()
[all …]
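
Both emit_pte() and __xe_migrate_update_pgtables() cap how many PTE writes fit in one store-data instruction, chopping the total into MAX_PTE_PER_SDI-sized chunks. A hedged sketch of that chunking loop; the page size mirrors the hits, the cap value is illustrative, and the emit helper stands in for the real batch-buffer writes:

#include <stdint.h>
#include <stdio.h>

#define XE_PAGE_SIZE       4096u
#define MAX_PTE_PER_SDI    0x1feu    /* per-instruction cap, illustrative */
#define DIV_ROUND_UP(n, d)    (((n) + (d) - 1) / (d))

/* Stand-in for emitting one store-data-immediate style instruction. */
static void emit_chunk(uint32_t first_pte, uint32_t count)
{
    printf("SDI: %u PTEs starting at index %u\n", count, first_pte);
}

static void emit_ptes(uint64_t size)
{
    uint32_t ptes = (uint32_t)DIV_ROUND_UP(size, XE_PAGE_SIZE);
    uint32_t idx = 0;

    while (ptes) {
        uint32_t chunk = ptes < MAX_PTE_PER_SDI ? ptes : MAX_PTE_PER_SDI;

        emit_chunk(idx, chunk);
        idx += chunk;
        ptes -= chunk;
    }
}

int main(void)
{
    emit_ptes(8u << 20);    /* 8 MiB -> 2048 PTEs -> five instructions */
    return 0;
}
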
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/
gsp.c
1145 gsp->shm.ptes.nr = (gsp->shm.cmdq.size + gsp->shm.msgq.size) >> GSP_PAGE_SHIFT; in r535_gsp_shared_init()
1146 gsp->shm.ptes.nr += DIV_ROUND_UP(gsp->shm.ptes.nr * sizeof(u64), GSP_PAGE_SIZE); in r535_gsp_shared_init()
1147 gsp->shm.ptes.size = ALIGN(gsp->shm.ptes.nr * sizeof(u64), GSP_PAGE_SIZE); in r535_gsp_shared_init()
1149 ret = nvkm_gsp_mem_ctor(gsp, gsp->shm.ptes.size + in r535_gsp_shared_init()
1156 gsp->shm.ptes.ptr = gsp->shm.mem.data; in r535_gsp_shared_init()
1157 gsp->shm.cmdq.ptr = (u8 *)gsp->shm.ptes.ptr + gsp->shm.ptes.size; in r535_gsp_shared_init()
1160 for (i = 0; i < gsp->shm.ptes.nr; i++) in r535_gsp_shared_init()
1161 gsp->shm.ptes.ptr[i] = gsp->shm.mem.addr + (i << GSP_PAGE_SHIFT); in r535_gsp_shared_init()
1190 args->messageQueueInitArguments.pageTableEntryCount = gsp->shm.ptes.nr; in r535_gsp_set_rmargs()
1463 static void create_pte_array(u64 *ptes, dma_addr_t addr, size_t size) in create_pte_array() argument
[all …]
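
The GSP shared area is self-describing: per the hits above, the page count covers the cmdq/msgq pages plus the pages holding the PTE array itself (the DIV_ROUND_UP bump), and each entry then points at successive pages of one contiguous allocation. A hedged sketch of that sizing arithmetic, with illustrative queue sizes and a pretend DMA base:

#include <stdint.h>
#include <stdio.h>

#define GSP_PAGE_SHIFT    12
#define GSP_PAGE_SIZE     (1u << GSP_PAGE_SHIFT)
#define DIV_ROUND_UP(n, d)    (((n) + (d) - 1) / (d))
#define ALIGN_UP(x, a)        (((x) + (a) - 1) & ~((uint64_t)(a) - 1))

int main(void)
{
    uint64_t cmdq_size = 0x40000, msgq_size = 0x40000;    /* illustrative */

    /* Pages backing the two queues... */
    uint64_t nr = (cmdq_size + msgq_size) >> GSP_PAGE_SHIFT;
    /* ...plus pages backing the PTE array that will describe them all. */
    nr += DIV_ROUND_UP(nr * sizeof(uint64_t), GSP_PAGE_SIZE);

    uint64_t ptes_size = ALIGN_UP(nr * sizeof(uint64_t), GSP_PAGE_SIZE);
    uint64_t base = 0x100000000ull;    /* pretend DMA base address */

    printf("%llu pages, PTE array %llu bytes\n",
           (unsigned long long)nr, (unsigned long long)ptes_size);

    /* Flat table: entry i simply points at page i, cf. create_pte_array(). */
    for (uint64_t i = 0; i < 3; i++)
        printf("pte[%llu] = %#llx\n", (unsigned long long)i,
               (unsigned long long)(base + (i << GSP_PAGE_SHIFT)));
    return 0;
}
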
/linux/arch/s390/kvm/
dat.c
292 pt->ptes[i].val = init.val | i * PAGE_SIZE; in dat_split_ste()
307 pgste = dat_save_storage_key_into_pgste(pt->ptes[i], pgste); in dat_split_ste()
308 pgste_set_unlock(pt->ptes + i, pgste); in dat_split_ste()
532 *ptepp = pgtable->ptes + vaddr.px; in dat_entry_walk()
544 if (pte_hole(READ_ONCE(table->ptes[idx]))) { in dat_pte_walk_range()
551 rc = w->ops->pte_entry(table->ptes + idx, gfn, gfn + 1, w); in dat_pte_walk_range()
964 while (!pgste_get_trylock_multiple(table->ptes + param.offset, n, pgstes)) in dat_get_ptval()
970 pgste_set_unlock_multiple(table->ptes + param.offset, n, pgstes); in dat_get_ptval()
981 while (!pgste_get_trylock_multiple(table->ptes + param.offset, n, pgstes)) in dat_set_ptval()
989 pgste_set_unlock_multiple(table->ptes + param.offset, n, pgstes); in dat_set_ptval()
dat.h
339 union pte ptes[_PAGE_ENTRIES]; member
633 static inline void dat_init_page_table(struct page_table *pt, unsigned long ptes, in dat_init_page_table() argument
636 memset64((void *)pt->ptes, ptes, PTRS_PER_PTE); in dat_init_page_table()
/linux/arch/x86/kvm/mmu/
paging_tmpl.h
84 pt_element_t ptes[PT_MAX_FULL_LEVELS]; member
213 pte = orig_pte = walker->ptes[level - 1]; in FNAME()
254 walker->ptes[level - 1] = pte; in FNAME()
425 walker->ptes[walker->level - 1] = pte; in FNAME()
568 return r || curr_pte != gw->ptes[level - 1]; in FNAME()
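
paging_tmpl.h snapshots the guest PTE at every level into walker->ptes[] during the walk; before trusting the result, KVM re-reads the entry and compares it against the snapshot (the curr_pte != gw->ptes[level - 1] hit) to catch a concurrent guest update. A hedged sketch of that snapshot-and-revalidate idea, with a plain array standing in for guest memory:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_LEVELS    4

struct walker {
    uint64_t ptes[MAX_LEVELS];    /* snapshot of each level's guest entry */
};

/* Stand-in for a guest-memory read of one page-table entry. */
static uint64_t guest_pt[MAX_LEVELS] = { 0x1003, 0x2003, 0x3003, 0x4003 };
static uint64_t read_gpte(int level) { return guest_pt[level]; }

static void walk(struct walker *w)
{
    for (int level = MAX_LEVELS - 1; level >= 0; level--)
        w->ptes[level] = read_gpte(level);    /* cf. walker->ptes[level - 1] */
}

/* Revalidate one level: true means the entry changed since the walk. */
static bool gpte_changed(const struct walker *w, int level)
{
    return read_gpte(level) != w->ptes[level];
}

int main(void)
{
    struct walker w;

    walk(&w);
    guest_pt[1] = 0x5003;    /* guest rewrites a mid-level entry */
    printf("level 1 changed: %d\n", gpte_changed(&w, 1));
    printf("level 2 changed: %d\n", gpte_changed(&w, 2));
    return 0;
}
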
/linux/arch/arm64/kernel/pi/
map_kernel.c
202 static u8 ptes[INIT_IDMAP_FDT_SIZE] __initdata __aligned(PAGE_SIZE); in map_fdt() local
204 phys_addr_t ptep = (phys_addr_t)ptes; /* We're idmapped when called */ in map_fdt()
/linux/drivers/gpu/drm/msm/
msm_iommu.c
275 msm_iommu_pagetable_walk(struct msm_mmu *mmu, unsigned long iova, uint64_t ptes[4]) in msm_iommu_pagetable_walk()
290 for (int i = 0; i < ARRAY_SIZE(wd.ptes); i++) in msm_iommu_pagetable_walk()
291 ptes[i] = wd.ptes[i]; in msm_iommu_pagetable_walk()
msm_mmu.h
92 int msm_iommu_pagetable_walk(struct msm_mmu *mmu, unsigned long iova, uint64_t ptes[4]);
