
Searched refs:mmu (Results 1 – 25 of 255) sorted by relevance


/linux/drivers/staging/media/ipu3/
ipu3-mmu.c
78 static void imgu_mmu_tlb_invalidate(struct imgu_mmu *mmu) in imgu_mmu_tlb_invalidate() argument
80 writel(TLB_INVALIDATE, mmu->base + REG_TLB_INVALIDATE); in imgu_mmu_tlb_invalidate()
83 static void call_if_imgu_is_powered(struct imgu_mmu *mmu, in call_if_imgu_is_powered() argument
84 void (*func)(struct imgu_mmu *mmu)) in call_if_imgu_is_powered() argument
86 if (!pm_runtime_get_if_in_use(mmu->dev)) in call_if_imgu_is_powered()
89 func(mmu); in call_if_imgu_is_powered()
90 pm_runtime_put(mmu->dev); in call_if_imgu_is_powered()
101 static void imgu_mmu_set_halt(struct imgu_mmu *mmu, bool halt) in imgu_mmu_set_halt() argument
106 writel(halt, mmu->base + REG_GP_HALT); in imgu_mmu_set_halt()
107 ret = readl_poll_timeout(mmu->base + REG_GP_HALTED, in imgu_mmu_set_halt()
[all …]
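
A minimal sketch of the runtime-PM guard idiom the ipu3-mmu.c hits show: touch the hardware only if it is already powered, then drop the reference. Names prefixed my_ are hypothetical stand-ins, not the driver's real types.

    #include <linux/pm_runtime.h>
    #include <linux/io.h>

    struct my_mmu {                 /* hypothetical stand-in for struct imgu_mmu */
            struct device *dev;
            void __iomem *base;
    };

    static void my_call_if_powered(struct my_mmu *mmu,
                                   void (*func)(struct my_mmu *mmu))
    {
            /* returns 0 if the device is suspended: skip the MMIO access */
            if (!pm_runtime_get_if_in_use(mmu->dev))
                    return;

            func(mmu);
            pm_runtime_put(mmu->dev);   /* balance the conditional get */
    }
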
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
base.c
42 nvkm_mmu_ptp_put(struct nvkm_mmu *mmu, bool force, struct nvkm_mmu_pt *pt) in nvkm_mmu_ptp_put() argument
51 list_add(&ptp->head, &mmu->ptp.list); in nvkm_mmu_ptp_put()
56 nvkm_mmu_ptc_put(mmu, force, &ptp->pt); in nvkm_mmu_ptp_put()
65 nvkm_mmu_ptp_get(struct nvkm_mmu *mmu, u32 size, bool zero) in nvkm_mmu_ptp_get() argument
74 ptp = list_first_entry_or_null(&mmu->ptp.list, typeof(*ptp), head); in nvkm_mmu_ptp_get()
82 ptp->pt = nvkm_mmu_ptc_get(mmu, 0x1000, 0x1000, false); in nvkm_mmu_ptp_get()
93 list_add(&ptp->head, &mmu->ptp.list); in nvkm_mmu_ptp_get()
120 nvkm_mmu_ptc_find(struct nvkm_mmu *mmu, u32 size) in nvkm_mmu_ptc_find() argument
124 list_for_each_entry(ptc, &mmu->ptc.list, head) { in nvkm_mmu_ptc_find()
134 list_add(&ptc->head, &mmu->ptc.list); in nvkm_mmu_ptc_find()
[all …]
Kbuild
2 nvkm-y += nvkm/subdev/mmu/base.o
3 nvkm-y += nvkm/subdev/mmu/nv04.o
4 nvkm-y += nvkm/subdev/mmu/nv41.o
5 nvkm-y += nvkm/subdev/mmu/nv44.o
6 nvkm-y += nvkm/subdev/mmu/nv50.o
7 nvkm-y += nvkm/subdev/mmu/g84.o
8 nvkm-y += nvkm/subdev/mmu/mcp77.o
9 nvkm-y += nvkm/subdev/mmu/gf100.o
10 nvkm-y += nvkm/subdev/mmu/gk104.o
11 nvkm-y += nvkm/subdev/mmu/gk20a.o
[all …]
ummu.c
35 struct nvkm_mmu *mmu = nvkm_ummu(object)->mmu; in nvkm_ummu_sclass() local
37 if (mmu->func->mem.user.oclass) { in nvkm_ummu_sclass()
39 oclass->base = mmu->func->mem.user; in nvkm_ummu_sclass()
45 if (mmu->func->vmm.user.oclass) { in nvkm_ummu_sclass()
47 oclass->base = mmu->func->vmm.user; in nvkm_ummu_sclass()
59 struct nvkm_mmu *mmu = ummu->mmu; in nvkm_ummu_heap() local
67 if ((index = args->v0.index) >= mmu->heap_nr) in nvkm_ummu_heap()
69 args->v0.size = mmu->heap[index].size; in nvkm_ummu_heap()
79 struct nvkm_mmu *mmu = ummu->mmu; in nvkm_ummu_type() local
87 if ((index = args->v0.index) >= mmu->type_nr) in nvkm_ummu_type()
[all …]
umem.c
72 struct nvkm_device *device = umem->mmu->subdev.device; in nvkm_umem_unmap()
90 struct nvkm_mmu *mmu = umem->mmu; in nvkm_umem_map() local
109 int ret = mmu->func->mem.umap(mmu, umem->memory, argv, argc, in nvkm_umem_map()
145 struct nvkm_mmu *mmu = nvkm_ummu(oclass->parent)->mmu; in nvkm_umem_new() local
161 if (type >= mmu->type_nr) in nvkm_umem_new()
167 umem->mmu = mmu; in nvkm_umem_new()
168 umem->type = mmu->type[type].type; in nvkm_umem_new()
172 if (mmu->type[type].type & NVKM_MEM_MAPPABLE) { in nvkm_umem_new()
177 ret = nvkm_mem_new_type(mmu, type, page, size, argv, argc, in nvkm_umem_new()
mem.c
33 struct nvkm_mmu *mmu; member
88 dma_unmap_page(mem->mmu->subdev.device->dev, in nvkm_mem_dtor()
144 nvkm_mem_new_host(struct nvkm_mmu *mmu, int type, u8 page, u64 size, in nvkm_mem_new_host() argument
147 struct device *dev = mmu->subdev.device->dev; in nvkm_mem_new_host()
157 if ( (mmu->type[type].type & NVKM_MEM_COHERENT) && in nvkm_mem_new_host()
158 !(mmu->type[type].type & NVKM_MEM_UNCACHED)) in nvkm_mem_new_host()
169 mem->mmu = mmu; in nvkm_mem_new_host()
199 if (mmu->dma_bits > 32) in nvkm_mem_new_host()
209 mem->dma[mem->pages] = dma_map_page(mmu->subdev.device->dev, in nvkm_mem_new_host()
224 nvkm_mem_new_type(struct nvkm_mmu *mmu, int type, u8 page, u64 size, in nvkm_mem_new_type() argument
[all …]
/linux/drivers/gpu/drm/nouveau/nvif/
mmu.c
28 nvif_mmu_dtor(struct nvif_mmu *mmu) in nvif_mmu_dtor() argument
30 if (!nvif_object_constructed(&mmu->object)) in nvif_mmu_dtor()
33 kfree(mmu->kind); in nvif_mmu_dtor()
34 kfree(mmu->type); in nvif_mmu_dtor()
35 kfree(mmu->heap); in nvif_mmu_dtor()
36 nvif_object_dtor(&mmu->object); in nvif_mmu_dtor()
41 struct nvif_mmu *mmu) in nvif_mmu_ctor() argument
53 mmu->heap = NULL; in nvif_mmu_ctor()
54 mmu->type = NULL; in nvif_mmu_ctor()
55 mmu->kind = NULL; in nvif_mmu_ctor()
[all …]
mem.c
28 nvif_mem_ctor_map(struct nvif_mmu *mmu, const char *name, u8 type, u64 size, in nvif_mem_ctor_map() argument
31 int ret = nvif_mem_ctor(mmu, name, mmu->mem, NVIF_MEM_MAPPABLE | type, in nvif_mem_ctor_map()
48 nvif_mem_ctor_type(struct nvif_mmu *mmu, const char *name, s32 oclass, in nvif_mem_ctor_type() argument
72 ret = nvif_object_ctor(&mmu->object, name ? name : "nvifMem", 0, oclass, in nvif_mem_ctor_type()
75 mem->type = mmu->type[type].type; in nvif_mem_ctor_type()
88 nvif_mem_ctor(struct nvif_mmu *mmu, const char *name, s32 oclass, u8 type, in nvif_mem_ctor() argument
95 for (i = 0; ret && i < mmu->type_nr; i++) { in nvif_mem_ctor()
96 if ((mmu->type[i].type & type) == type) { in nvif_mem_ctor()
97 ret = nvif_mem_ctor_type(mmu, name, oclass, i, page, in nvif_mem_ctor()
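
The nvif_mem_ctor() hit above walks the MMU's memory types and uses the first whose flags contain every requested bit. A minimal sketch of that selection loop, with hypothetical names:

    /* return the first type index whose flags satisfy the full mask, else -1 */
    static int pick_type(const unsigned char *type_flags, int type_nr,
                         unsigned char wanted)
    {
            int i;

            for (i = 0; i < type_nr; i++) {
                    if ((type_flags[i] & wanted) == wanted)
                            return i;
            }
            return -1;
    }
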
/linux/drivers/iommu/
ipmmu-vmsa.c
71 struct ipmmu_vmsa_device *mmu; member
149 static bool ipmmu_is_root(struct ipmmu_vmsa_device *mmu) in ipmmu_is_root() argument
151 return mmu->root == mmu; in ipmmu_is_root()
156 struct ipmmu_vmsa_device *mmu = dev_get_drvdata(dev); in __ipmmu_check_device() local
159 if (ipmmu_is_root(mmu)) in __ipmmu_check_device()
160 *rootp = mmu; in __ipmmu_check_device()
177 static u32 ipmmu_read(struct ipmmu_vmsa_device *mmu, unsigned int offset) in ipmmu_read() argument
179 return ioread32(mmu->base + offset); in ipmmu_read()
182 static void ipmmu_write(struct ipmmu_vmsa_device *mmu, unsigned int offset, in ipmmu_write() argument
185 iowrite32(data, mmu->base + offset); in ipmmu_write()
[all …]
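
A minimal sketch of the MMIO accessor pattern in the ipmmu-vmsa.c hits: thin wrappers centralize the base pointer and the I/O primitives. Hypothetical names throughout.

    #include <linux/io.h>

    struct my_ipmmu {
            void __iomem *base;
    };

    static u32 my_read(struct my_ipmmu *mmu, unsigned int offset)
    {
            return ioread32(mmu->base + offset);
    }

    static void my_write(struct my_ipmmu *mmu, unsigned int offset, u32 data)
    {
            iowrite32(data, mmu->base + offset);
    }
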
/linux/drivers/media/pci/intel/ipu6/
ipu6-mmu.c
54 static void tlb_invalidate(struct ipu6_mmu *mmu) in tlb_invalidate() argument
59 spin_lock_irqsave(&mmu->ready_lock, flags); in tlb_invalidate()
60 if (!mmu->ready) { in tlb_invalidate()
61 spin_unlock_irqrestore(&mmu->ready_lock, flags); in tlb_invalidate()
65 for (i = 0; i < mmu->nr_mmus; i++) { in tlb_invalidate()
74 if (mmu->mmu_hw[i].insert_read_before_invalidate) in tlb_invalidate()
75 readl(mmu->mmu_hw[i].base + REG_L1_PHYS); in tlb_invalidate()
77 writel(0xffffffff, mmu->mmu_hw[i].base + in tlb_invalidate()
87 spin_unlock_irqrestore(&mmu->ready_lock, flags); in tlb_invalidate()
414 static int allocate_trash_buffer(struct ipu6_mmu *mmu) in allocate_trash_buffer() argument
[all …]
ipu6-dma.c
29 static struct vm_info *get_vm_info(struct ipu6_mmu *mmu, dma_addr_t iova) in get_vm_info() argument
33 list_for_each_entry_safe(info, save, &mmu->vma_list, list) { in get_vm_info()
120 struct ipu6_mmu *mmu = sys->mmu; in ipu6_dma_sync_single() local
122 info = get_vm_info(mmu, dma_handle); in ipu6_dma_sync_single()
159 struct ipu6_mmu *mmu = sys->mmu; in ipu6_dma_alloc() local
174 iova = alloc_iova(&mmu->dmap->iovad, count, in ipu6_dma_alloc()
196 ret = ipu6_mmu_map(mmu->dmap->mmu_info, in ipu6_dma_alloc()
218 list_add(&info->list, &mmu->vma_list); in ipu6_dma_alloc()
225 pci_dma_addr = ipu6_mmu_iova_to_phys(mmu->dmap->mmu_info, in ipu6_dma_alloc()
230 ipu6_mmu_unmap(mmu->dmap->mmu_info, ipu6_iova, PAGE_SIZE); in ipu6_dma_alloc()
[all …]
/linux/arch/arm64/kvm/hyp/nvhe/
tlb.c
14 struct kvm_s2_mmu *mmu; member
19 static void enter_vmid_context(struct kvm_s2_mmu *mmu, in enter_vmid_context() argument
23 struct kvm_s2_mmu *host_s2_mmu = &host_mmu.arch.mmu; in enter_vmid_context()
29 cxt->mmu = NULL; in enter_vmid_context()
62 if (mmu == vcpu->arch.hw_mmu || WARN_ON(mmu != host_s2_mmu)) in enter_vmid_context()
65 cxt->mmu = vcpu->arch.hw_mmu; in enter_vmid_context()
68 if (mmu == host_s2_mmu) in enter_vmid_context()
71 cxt->mmu = host_s2_mmu; in enter_vmid_context()
113 __load_stage2(mmu, kern_hyp_va(mmu->arch)); in enter_vmid_context()
120 struct kvm_s2_mmu *mmu = cxt->mmu; in exit_vmid_context() local
[all …]
/linux/drivers/gpu/drm/panfrost/
panfrost_mmu.c
113 struct panfrost_mmu *mmu, in mmu_hw_do_operation() argument
119 ret = mmu_hw_do_operation_locked(pfdev, mmu->as, iova, size, op); in mmu_hw_do_operation()
124 static void panfrost_mmu_enable(struct panfrost_device *pfdev, struct panfrost_mmu *mmu) in panfrost_mmu_enable() argument
126 int as_nr = mmu->as; in panfrost_mmu_enable()
127 struct io_pgtable_cfg *cfg = &mmu->pgtbl_cfg; in panfrost_mmu_enable()
158 u32 panfrost_mmu_as_get(struct panfrost_device *pfdev, struct panfrost_mmu *mmu) in panfrost_mmu_as_get() argument
164 as = mmu->as; in panfrost_mmu_as_get()
166 int en = atomic_inc_return(&mmu->as_count); in panfrost_mmu_as_get()
175 list_move(&mmu->list, &pfdev->as_lru_list); in panfrost_mmu_as_get()
185 panfrost_mmu_enable(pfdev, mmu); in panfrost_mmu_as_get()
[all …]
/linux/arch/arm64/kvm/hyp/vhe/
tlb.c
14 struct kvm_s2_mmu *mmu; member
20 static void enter_vmid_context(struct kvm_s2_mmu *mmu, in enter_vmid_context() argument
28 if (vcpu && mmu != vcpu->arch.hw_mmu) in enter_vmid_context()
29 cxt->mmu = vcpu->arch.hw_mmu; in enter_vmid_context()
31 cxt->mmu = NULL; in enter_vmid_context()
63 __load_stage2(mmu, mmu->arch); in enter_vmid_context()
80 if (cxt->mmu) in exit_vmid_context()
81 __load_stage2(cxt->mmu, cxt->mmu->arch); in exit_vmid_context()
92 void __kvm_tlb_flush_vmid_ipa(struct kvm_s2_mmu *mmu, in __kvm_tlb_flush_vmid_ipa() argument
100 enter_vmid_context(mmu, &cxt); in __kvm_tlb_flush_vmid_ipa()
[all …]
/linux/drivers/gpu/drm/msm/
msm_mmu.h
13 void (*detach)(struct msm_mmu *mmu);
14 int (*map)(struct msm_mmu *mmu, uint64_t iova, struct sg_table *sgt,
16 int (*unmap)(struct msm_mmu *mmu, uint64_t iova, size_t len);
17 void (*destroy)(struct msm_mmu *mmu);
18 void (*resume_translation)(struct msm_mmu *mmu);
35 static inline void msm_mmu_init(struct msm_mmu *mmu, struct device *dev, in msm_mmu_init() argument
38 mmu->dev = dev; in msm_mmu_init()
39 mmu->funcs = funcs; in msm_mmu_init()
40 mmu->type = type; in msm_mmu_init()
46 static inline void msm_mmu_set_fault_handler(struct msm_mmu *mmu, void *arg, in msm_mmu_set_fault_handler() argument
[all …]
msm_iommu.c
30 static struct msm_iommu_pagetable *to_pagetable(struct msm_mmu *mmu) in to_pagetable() argument
32 return container_of(mmu, struct msm_iommu_pagetable, base); in to_pagetable()
91 static int msm_iommu_pagetable_unmap(struct msm_mmu *mmu, u64 iova, in msm_iommu_pagetable_unmap() argument
94 struct msm_iommu_pagetable *pagetable = to_pagetable(mmu); in msm_iommu_pagetable_unmap()
115 static int msm_iommu_pagetable_map(struct msm_mmu *mmu, u64 iova, in msm_iommu_pagetable_map() argument
118 struct msm_iommu_pagetable *pagetable = to_pagetable(mmu); in msm_iommu_pagetable_map()
145 msm_iommu_pagetable_unmap(mmu, iova, addr - iova); in msm_iommu_pagetable_map()
154 static void msm_iommu_pagetable_destroy(struct msm_mmu *mmu) in msm_iommu_pagetable_destroy() argument
156 struct msm_iommu_pagetable *pagetable = to_pagetable(mmu); in msm_iommu_pagetable_destroy()
172 int msm_iommu_pagetable_params(struct msm_mmu *mmu, in msm_iommu_pagetable_params() argument
[all …]
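
The msm_mmu.h and msm_iommu.c hits show a common kernel pattern: a base object carries an ops table, and each implementation embeds the base and recovers its container with container_of(). A minimal sketch under hypothetical my_ names:

    #include <linux/container_of.h>
    #include <linux/types.h>

    struct my_mmu;

    struct my_mmu_funcs {
            int (*map)(struct my_mmu *mmu, u64 iova, size_t len);
            int (*unmap)(struct my_mmu *mmu, u64 iova, size_t len);
    };

    struct my_mmu {
            const struct my_mmu_funcs *funcs;
    };

    struct my_pagetable {
            struct my_mmu base;     /* embedded base object */
            void *pgtbl;            /* implementation-private state */
    };

    static struct my_pagetable *to_my_pagetable(struct my_mmu *mmu)
    {
            /* downcast: recover the container from the embedded base */
            return container_of(mmu, struct my_pagetable, base);
    }
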
msm_gem_vma.c
19 if (aspace->mmu) in msm_gem_address_space_destroy()
20 aspace->mmu->funcs->destroy(aspace->mmu); in msm_gem_address_space_destroy()
51 aspace->mmu->funcs->unmap(aspace->mmu, vma->iova, size); in msm_gem_vma_purge()
84 ret = aspace->mmu->funcs->map(aspace->mmu, vma->iova, sgt, size, prot); in msm_gem_vma_map()
154 msm_gem_address_space_create(struct msm_mmu *mmu, const char *name, in msm_gem_address_space_create() argument
159 if (IS_ERR(mmu)) in msm_gem_address_space_create()
160 return ERR_CAST(mmu); in msm_gem_address_space_create()
168 aspace->mmu = mmu; in msm_gem_address_space_create()
/linux/arch/arm64/include/asm/
kvm_mmu.h
12 #include <asm/mmu.h>
150 #define kvm_phys_shift(mmu) VTCR_EL2_IPA((mmu)->vtcr) argument
151 #define kvm_phys_size(mmu) (_AC(1, ULL) << kvm_phys_shift(mmu))
152 #define kvm_phys_mask(mmu) (kvm_phys_size(mmu) - _AC(1, ULL))
171 void kvm_stage2_unmap_range(struct kvm_s2_mmu *mmu, phys_addr_t start,
173 void kvm_stage2_flush_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end);
174 void kvm_stage2_wp_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end);
301 kvm_get_vttbr(struct kvm_s2_mmu *mmu) in kvm_get_vttbr() argument
317 __load_stage2(struct kvm_s2_mmu *mmu, struct kvm_arch *arch) in __load_stage2() argument
331 kvm_s2_mmu_to_kvm(struct kvm_s2_mmu *mmu) in kvm_s2_mmu_to_kvm() argument
342 kvm_s2_mmu_valid(struct kvm_s2_mmu *mmu) in kvm_s2_mmu_valid() argument
347 kvm_is_nested_s2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu) in kvm_is_nested_s2_mmu() argument
[all …]
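
A worked example of the kvm_phys_shift/size/mask arithmetic above, as plain C: the IPA size is 1 shifted by the reported width, and the mask is that size minus one.

    #include <stdio.h>

    int main(void)
    {
            unsigned long long shift = 40;             /* e.g. a 40-bit IPA space */
            unsigned long long size  = 1ULL << shift;  /* kvm_phys_size() */
            unsigned long long mask  = size - 1ULL;    /* kvm_phys_mask() */

            /* prints size=10000000000 mask=ffffffffff */
            printf("size=%llx mask=%llx\n", size, mask);
            return 0;
    }
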
/linux/arch/arc/mm/
tlb.c
136 struct cpuinfo_arc_mmu *mmu = &mmuinfo; in local_flush_tlb_all() local
139 int num_tlb = mmu->sets * mmu->ways; in local_flush_tlb_all()
565 struct cpuinfo_arc_mmu *mmu = &mmuinfo; in arc_mmu_mumbojumbo() local
573 mmu->ver = (bcr >> 24); in arc_mmu_mumbojumbo()
575 if (is_isa_arcompact() && mmu->ver == 3) { in arc_mmu_mumbojumbo()
577 mmu->pg_sz_k = 1 << (mmu3->pg_sz - 1); in arc_mmu_mumbojumbo()
578 mmu->sets = 1 << mmu3->sets; in arc_mmu_mumbojumbo()
579 mmu->ways = 1 << mmu3->ways; in arc_mmu_mumbojumbo()
585 mmu->pg_sz_k = 1 << (mmu4->sz0 - 1); in arc_mmu_mumbojumbo()
586 mmu->s_pg_sz_m = 1 << (mmu4->sz1 - 11); in arc_mmu_mumbojumbo()
[all …]
/linux/drivers/xen/
grant-dma-iommu.c
36 struct grant_dma_iommu_device *mmu; in grant_dma_iommu_probe() local
39 mmu = devm_kzalloc(&pdev->dev, sizeof(*mmu), GFP_KERNEL); in grant_dma_iommu_probe()
40 if (!mmu) in grant_dma_iommu_probe()
43 mmu->dev = &pdev->dev; in grant_dma_iommu_probe()
45 ret = iommu_device_register(&mmu->iommu, &grant_dma_iommu_ops, &pdev->dev); in grant_dma_iommu_probe()
49 platform_set_drvdata(pdev, mmu); in grant_dma_iommu_probe()
56 struct grant_dma_iommu_device *mmu = platform_get_drvdata(pdev); in grant_dma_iommu_remove() local
59 iommu_device_unregister(&mmu->iommu); in grant_dma_iommu_remove()
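
A minimal sketch of the probe/remove pattern in the grant-dma-iommu.c hits: device-managed allocation plus drvdata so remove() can recover the state. Hypothetical names; newer kernels use a void platform remove callback.

    #include <linux/platform_device.h>

    struct my_state {
            struct device *dev;
    };

    static int my_probe(struct platform_device *pdev)
    {
            struct my_state *st;

            st = devm_kzalloc(&pdev->dev, sizeof(*st), GFP_KERNEL);
            if (!st)
                    return -ENOMEM;

            st->dev = &pdev->dev;
            platform_set_drvdata(pdev, st);   /* retrievable at remove time */
            return 0;
    }

    static void my_remove(struct platform_device *pdev)
    {
            struct my_state *st = platform_get_drvdata(pdev);

            (void)st;   /* unregister/teardown here; devm frees the allocation */
    }
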
/linux/drivers/gpu/drm/nouveau/
nouveau_mem.c
93 struct nvif_mmu *mmu = &drm->mmu; in nouveau_mem_host() local
103 if (mem->kind && !(mmu->type[type].type & NVIF_MEM_KIND)) in nouveau_mem_host()
105 if (mem->comp && !(mmu->type[type].type & NVIF_MEM_COMP)) { in nouveau_mem_host()
106 if (mmu->object.oclass >= NVIF_CLASS_MMU_GF100) in nouveau_mem_host()
107 mem->kind = mmu->kind[mem->kind]; in nouveau_mem_host()
117 ret = nvif_mem_ctor_type(mmu, "ttmHostMem", mmu->mem, type, PAGE_SHIFT, in nouveau_mem_host()
129 struct nvif_mmu *mmu = &drm->mmu; in nouveau_mem_vram() local
134 switch (mmu->mem) { in nouveau_mem_vram()
136 ret = nvif_mem_ctor_type(mmu, "ttmVram", mmu->mem, in nouveau_mem_vram()
144 ret = nvif_mem_ctor_type(mmu, "ttmVram", mmu->mem, in nouveau_mem_vram()
[all …]
/linux/drivers/gpu/drm/msm/adreno/
a2xx_gpummu.c
27 static void a2xx_gpummu_detach(struct msm_mmu *mmu) in a2xx_gpummu_detach() argument
31 static int a2xx_gpummu_map(struct msm_mmu *mmu, uint64_t iova, in a2xx_gpummu_map() argument
34 struct a2xx_gpummu *gpummu = to_a2xx_gpummu(mmu); in a2xx_gpummu_map()
59 static int a2xx_gpummu_unmap(struct msm_mmu *mmu, uint64_t iova, size_t len) in a2xx_gpummu_unmap() argument
61 struct a2xx_gpummu *gpummu = to_a2xx_gpummu(mmu); in a2xx_gpummu_unmap()
74 static void a2xx_gpummu_resume_translation(struct msm_mmu *mmu) in a2xx_gpummu_resume_translation() argument
78 static void a2xx_gpummu_destroy(struct msm_mmu *mmu) in a2xx_gpummu_destroy() argument
80 struct a2xx_gpummu *gpummu = to_a2xx_gpummu(mmu); in a2xx_gpummu_destroy()
82 dma_free_attrs(mmu->dev, TABLE_SIZE, gpummu->table, gpummu->pt_base, in a2xx_gpummu_destroy()
117 void a2xx_gpummu_params(struct msm_mmu *mmu, dma_addr_t *pt_base, in a2xx_gpummu_params() argument
[all …]
/linux/drivers/gpu/drm/panthor/
panthor_mmu.c
574 lockdep_assert_held(&ptdev->mmu->as.slots_lock); in mmu_hw_do_operation_locked()
601 mutex_lock(&ptdev->mmu->as.slots_lock); in mmu_hw_do_operation()
603 mutex_unlock(&ptdev->mmu->as.slots_lock); in mmu_hw_do_operation()
686 lockdep_assert_held(&ptdev->mmu->as.slots_lock); in panthor_vm_release_as_locked()
691 ptdev->mmu->as.slots[vm->as.id].vm = NULL; in panthor_vm_release_as_locked()
692 clear_bit(vm->as.id, &ptdev->mmu->as.alloc_mask); in panthor_vm_release_as_locked()
720 mutex_lock(&ptdev->mmu->as.slots_lock); in panthor_vm_active()
730 if (ptdev->mmu->as.faulty_mask & panthor_mmu_as_fault_mask(ptdev, as)) in panthor_vm_active()
738 drm_WARN_ON(&ptdev->base, ptdev->mmu->as.alloc_mask & BIT(0)); in panthor_vm_active()
741 as = ffz(ptdev->mmu->as.alloc_mask | BIT(0)); in panthor_vm_active()
[all …]
/linux/drivers/gpu/drm/nouveau/include/nvif/
mmu.h
39 nvif_mmu_kind_valid(struct nvif_mmu *mmu, u8 kind) in nvif_mmu_kind_valid() argument
42 if (kind >= mmu->kind_nr || mmu->kind[kind] == mmu->kind_inv) in nvif_mmu_kind_valid()
49 nvif_mmu_type(struct nvif_mmu *mmu, u8 mask) in nvif_mmu_type() argument
52 for (i = 0; i < mmu->type_nr; i++) { in nvif_mmu_type()
53 if ((mmu->type[i].type & mask) == mask) in nvif_mmu_type()
/linux/arch/x86/kvm/mmu/
mmu.c
223 static inline bool __maybe_unused is_##reg##_##name(struct kvm_mmu *mmu) \
225 return !!(mmu->cpu_role. base_or_ext . reg##_##name); \
236 static inline bool is_cr0_pg(struct kvm_mmu *mmu) in is_cr0_pg() argument
238 return mmu->cpu_role.base.level > 0; in is_cr0_pg()
241 static inline bool is_cr4_pae(struct kvm_mmu *mmu) in is_cr4_pae() argument
243 return !mmu->cpu_role.base.has_4_byte_gpte; in is_cr4_pae()
263 struct kvm_mmu *mmu) in kvm_mmu_get_guest_pgd() argument
265 if (IS_ENABLED(CONFIG_MITIGATION_RETPOLINE) && mmu->get_guest_pgd == get_guest_cr3) in kvm_mmu_get_guest_pgd()
268 return mmu->get_guest_pgd(vcpu); in kvm_mmu_get_guest_pgd()
556 return tdp_mmu_enabled && vcpu->arch.mmu->root_role.direct; in is_tdp_mmu_active()
[all …]
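
The mmu.c hit above is a token-pasting generator: one macro stamps out is_<reg>_<name>() accessors (some, like is_cr0_pg(), are hand-written instead). A standalone C illustration, not the kernel's code:

    #include <stdbool.h>

    struct role { unsigned cr0_pg : 1; unsigned cr4_pae : 1; };

    #define BUILD_ROLE_ACCESSOR(reg, name)                          \
    static inline bool is_##reg##_##name(const struct role *r)      \
    {                                                               \
            return !!(r->reg##_##name);                             \
    }

    BUILD_ROLE_ACCESSOR(cr0, pg)    /* defines is_cr0_pg()  */
    BUILD_ROLE_ACCESSOR(cr4, pae)   /* defines is_cr4_pae() */
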
