
Searched refs:mmu_context (Results 1 – 17 of 17) sorted by relevance

/linux/drivers/gpu/drm/etnaviv/
etnaviv_buffer.c
124 &gpu->mmu_context->cmdbuf_mapping) + in etnaviv_buffer_dump()
159 &gpu->mmu_context->cmdbuf_mapping) + in etnaviv_buffer_reserve()
174 etnaviv_cmdbuf_get_va(buffer, &gpu->mmu_context->cmdbuf_mapping) in etnaviv_buffer_init()
326 etnaviv_cmdbuf_get_va(buffer, &gpu->mmu_context->cmdbuf_mapping) in etnaviv_sync_point_queue()
341 struct etnaviv_iommu_context *mmu_context, unsigned int event, in etnaviv_buffer_queue() argument
349 bool switch_mmu_context = gpu->mmu_context != mmu_context; in etnaviv_buffer_queue()
350 unsigned int new_flush_seq = READ_ONCE(gpu->mmu_context->flush_seq); in etnaviv_buffer_queue()
361 &gpu->mmu_context->cmdbuf_mapping); in etnaviv_buffer_queue()
377 if (gpu->mmu_context->global->version == ETNAVIV_IOMMU_V1) in etnaviv_buffer_queue()
399 struct etnaviv_iommu_context *old_context = gpu->mmu_context; in etnaviv_buffer_queue()
[all …]
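
The etnaviv_buffer_queue() matches above capture the central per-submit decision in the ring-buffer code: compare the GPU's currently active IOMMU context with the one the job was built against, and sample that context's flush_seq counter to learn whether mappings changed underneath it. The exact predicate lives in the driver; the stand-alone C sketch below is only a plausible reading of the two lines shown (flush when the context differs or its flush sequence moved on), and all sketch_* names are invented for illustration.

#include <stdbool.h>
#include <stdio.h>

/* Simplified stand-ins for struct etnaviv_iommu_context / struct etnaviv_gpu. */
struct sketch_iommu_context {
        unsigned int flush_seq;                   /* bumped when mappings change */
};

struct sketch_gpu {
        struct sketch_iommu_context *mmu_context; /* context live on the GPU */
        unsigned int flush_seq;                   /* sequence last flushed to the TLB */
};

/*
 * Mirrors the checks visible at etnaviv_buffer.c:349-350: a TLB flush (and
 * possibly a full context switch) is needed when the job uses a different
 * context, or when mappings changed since the GPU last flushed.
 */
static bool need_mmu_flush(const struct sketch_gpu *gpu,
                           const struct sketch_iommu_context *job_ctx)
{
        bool switch_mmu_context = gpu->mmu_context != job_ctx;
        unsigned int new_flush_seq = gpu->mmu_context->flush_seq;

        return switch_mmu_context || gpu->flush_seq != new_flush_seq;
}

int main(void)
{
        struct sketch_iommu_context ctx = { .flush_seq = 3 };
        struct sketch_gpu gpu = { .mmu_context = &ctx, .flush_seq = 2 };

        /* Same context, but mappings changed (seq 2 -> 3): a flush is required. */
        printf("flush needed: %d\n", need_mmu_flush(&gpu, &ctx));
        return 0;
}
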
etnaviv_dump.c
133 mutex_lock(&submit->mmu_context->lock); in etnaviv_core_dump()
135 mmu_size = etnaviv_iommu_dump_size(submit->mmu_context); in etnaviv_core_dump()
164 mutex_unlock(&submit->mmu_context->lock); in etnaviv_core_dump()
176 etnaviv_core_dump_mmu(&iter, submit->mmu_context, mmu_size); in etnaviv_core_dump()
180 &submit->mmu_context->cmdbuf_mapping)); in etnaviv_core_dump()
185 &submit->mmu_context->cmdbuf_mapping)); in etnaviv_core_dump()
187 mutex_unlock(&submit->mmu_context->lock); in etnaviv_core_dump()
etnaviv_drv.c
153 struct etnaviv_iommu_context *mmu_context; in etnaviv_mmu_show() local
163 mmu_context = gpu->mmu_context; in etnaviv_mmu_show()
164 if (mmu_context) in etnaviv_mmu_show()
165 etnaviv_iommu_context_get(mmu_context); in etnaviv_mmu_show()
168 if (!mmu_context) in etnaviv_mmu_show()
171 mutex_lock(&mmu_context->lock); in etnaviv_mmu_show()
172 drm_mm_print(&mmu_context->mm, &p); in etnaviv_mmu_show()
173 mutex_unlock(&mmu_context->lock); in etnaviv_mmu_show()
175 etnaviv_iommu_context_put(mmu_context); in etnaviv_mmu_show()
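
The etnaviv_mmu_show() matches illustrate a read-side pattern: a debugfs dump must pin the currently active context before walking its address-space allocator, because the GPU may drop or replace gpu->mmu_context at any time (the real function samples the pointer under the GPU's own locking, which is not visible in these excerpts). The stand-alone sketch below reproduces the pin / lock / dump / unpin sequence; sketch_ctx and its helpers are invented names, with a plain counter and a pthread mutex standing in for the kernel's refcount and mutex.

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

/* Toy refcounted context standing in for struct etnaviv_iommu_context. */
struct sketch_ctx {
        int refcount;
        pthread_mutex_t lock;
        /* ...address-space bookkeeping (a drm_mm in the real driver)... */
};

static struct sketch_ctx *sketch_ctx_get(struct sketch_ctx *ctx)
{
        ctx->refcount++;        /* the driver uses a proper kref; this is a toy */
        return ctx;
}

static void sketch_ctx_put(struct sketch_ctx *ctx)
{
        if (--ctx->refcount == 0) {
                pthread_mutex_destroy(&ctx->lock);
                free(ctx);
        }
}

/* Mirrors the shape of etnaviv_mmu_show(): pin, dump under the lock, unpin. */
static void sketch_mmu_show(struct sketch_ctx *active)
{
        struct sketch_ctx *ctx = active;

        if (!ctx)
                return;                 /* no context active on the GPU right now */
        sketch_ctx_get(ctx);

        pthread_mutex_lock(&ctx->lock);
        printf("dumping address space of context %p\n", (void *)ctx);
        pthread_mutex_unlock(&ctx->lock);

        sketch_ctx_put(ctx);
}

int main(void)
{
        struct sketch_ctx *ctx = calloc(1, sizeof(*ctx));

        ctx->refcount = 1;              /* creator's reference */
        pthread_mutex_init(&ctx->lock, NULL);

        sketch_mmu_show(ctx);           /* debugfs-style read */
        sketch_ctx_put(ctx);            /* drop the creator's reference */
        return 0;
}
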
etnaviv_iommu_v2.c
175 if (gpu->mmu_context) in etnaviv_iommuv2_restore_nonsec()
176 etnaviv_iommu_context_put(gpu->mmu_context); in etnaviv_iommuv2_restore_nonsec()
177 gpu->mmu_context = etnaviv_iommu_context_get(context); in etnaviv_iommuv2_restore_nonsec()
199 if (gpu->mmu_context) in etnaviv_iommuv2_restore_sec()
200 etnaviv_iommu_context_put(gpu->mmu_context); in etnaviv_iommuv2_restore_sec()
201 gpu->mmu_context = etnaviv_iommu_context_get(context); in etnaviv_iommuv2_restore_sec()
etnaviv_iommu.c
95 if (gpu->mmu_context) in etnaviv_iommuv1_restore()
96 etnaviv_iommu_context_put(gpu->mmu_context); in etnaviv_iommuv1_restore()
97 gpu->mmu_context = etnaviv_iommu_context_get(context); in etnaviv_iommuv1_restore()
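
Both IOMMU flavours restore a context with the same hand-over: drop the GPU's reference to whatever was installed before, then take a reference on the incoming context and store it (etnaviv_iommu.c:95-97 and the two etnaviv_iommu_v2.c restore paths). A minimal stand-alone sketch of that swap, with a plain counter standing in for etnaviv_iommu_context_get()/_put() and invented sketch_* names:

#include <stdlib.h>

/* Toy refcounted object standing in for struct etnaviv_iommu_context. */
struct sketch_ctx {
        int refcount;
};

static struct sketch_ctx *sketch_ctx_get(struct sketch_ctx *ctx)
{
        ctx->refcount++;
        return ctx;
}

static void sketch_ctx_put(struct sketch_ctx *ctx)
{
        if (--ctx->refcount == 0)
                free(ctx);
}

/*
 * The hand-over visible in the restore functions: release the previously
 * installed context, if any, then pin the new one before storing it.
 */
static void sketch_restore(struct sketch_ctx **slot, struct sketch_ctx *next)
{
        if (*slot)
                sketch_ctx_put(*slot);
        *slot = sketch_ctx_get(next);
}

int main(void)
{
        struct sketch_ctx *a = calloc(1, sizeof(*a));
        struct sketch_ctx *b = calloc(1, sizeof(*b));
        struct sketch_ctx *slot = NULL;

        a->refcount = 1;                /* creators' references */
        b->refcount = 1;

        sketch_restore(&slot, a);       /* the GPU slot now holds a */
        sketch_restore(&slot, b);       /* drops the slot's ref on a, pins b */

        sketch_ctx_put(a);              /* creator drops a: freed here */
        sketch_ctx_put(slot);           /* slot reference on b */
        sketch_ctx_put(b);              /* creator drops b: freed here */
        return 0;
}
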
etnaviv_gem.c
241 struct drm_gem_object *obj, struct etnaviv_iommu_context *mmu_context, in etnaviv_gem_mapping_get() argument
250 mapping = etnaviv_gem_get_vram_mapping(etnaviv_obj, mmu_context); in etnaviv_gem_mapping_get()
259 mutex_lock(&mmu_context->lock); in etnaviv_gem_mapping_get()
260 if (mapping->context == mmu_context) in etnaviv_gem_mapping_get()
269 mutex_unlock(&mmu_context->lock); in etnaviv_gem_mapping_get()
304 ret = etnaviv_iommu_map_gem(mmu_context, etnaviv_obj, in etnaviv_gem_mapping_get()
305 mmu_context->global->memory_base, in etnaviv_gem_mapping_get()
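
etnaviv_gem_mapping_get() shows that GPU virtual mappings are cached per IOMMU context: the object is first searched for an existing mapping belonging to the requested context, and only when none matches is a new mapping created in that context's address space via etnaviv_iommu_map_gem(), using the context's memory_base. The sketch below models that lookup-or-map flow with a tiny fixed-size cache; every name is invented, the locking and lifetime rules of the real driver are omitted, and using memory_base directly as the address is only a placeholder for real VA allocation.

#include <stdio.h>

struct sketch_ctx {
        unsigned int memory_base;
};

/* One cached GPU-VA mapping of a buffer object in a particular context. */
struct sketch_mapping {
        const struct sketch_ctx *context;
        unsigned int iova;
        int use;
};

#define SKETCH_MAX_MAPPINGS 4

struct sketch_bo {
        struct sketch_mapping mappings[SKETCH_MAX_MAPPINGS];
        unsigned int nr_mappings;
};

/* Lookup-or-map, loosely following the shape of etnaviv_gem_mapping_get(). */
static struct sketch_mapping *sketch_mapping_get(struct sketch_bo *bo,
                                                 const struct sketch_ctx *ctx)
{
        unsigned int i;

        /* Reuse the existing mapping if this context already has one. */
        for (i = 0; i < bo->nr_mappings; i++) {
                if (bo->mappings[i].context == ctx) {
                        bo->mappings[i].use++;
                        return &bo->mappings[i];
                }
        }

        if (bo->nr_mappings == SKETCH_MAX_MAPPINGS)
                return NULL;

        /* Otherwise create a new mapping inside this context's address space. */
        bo->mappings[bo->nr_mappings] = (struct sketch_mapping){
                .context = ctx,
                .iova = ctx->memory_base,       /* placeholder, see lead-in */
                .use = 1,
        };
        return &bo->mappings[bo->nr_mappings++];
}

int main(void)
{
        struct sketch_ctx ctx = { .memory_base = 0x80000000u };
        struct sketch_bo bo = { 0 };

        struct sketch_mapping *m1 = sketch_mapping_get(&bo, &ctx);
        struct sketch_mapping *m2 = sketch_mapping_get(&bo, &ctx);

        printf("same mapping reused: %d, use count: %d\n", m1 == m2, m2->use);
        return 0;
}
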
etnaviv_gem.h
94 struct etnaviv_iommu_context *mmu_context, *prev_mmu_context; member
120 struct drm_gem_object *obj, struct etnaviv_iommu_context *mmu_context,
etnaviv_gem_submit.c
222 submit->mmu_context, in submit_pin_objects()
368 if (submit->mmu_context) in submit_cleanup()
369 etnaviv_iommu_context_put(submit->mmu_context); in submit_cleanup()
532 submit->mmu_context = etnaviv_iommu_context_get(submit->ctx->mmu); in etnaviv_ioctl_gem_submit()
etnaviv_gpu.c
602 if (gpu->mmu_context) in etnaviv_hw_reset()
603 etnaviv_iommu_context_put(gpu->mmu_context); in etnaviv_hw_reset()
604 gpu->mmu_context = NULL; in etnaviv_hw_reset()
697 &gpu->mmu_context->cmdbuf_mapping); in etnaviv_gpu_start_fe_idleloop()
1409 etnaviv_gpu_start_fe_idleloop(gpu, submit->mmu_context); in etnaviv_gpu_submit()
1413 submit->prev_mmu_context = etnaviv_iommu_context_get(gpu->mmu_context); in etnaviv_gpu_submit()
1424 etnaviv_buffer_queue(gpu, submit->exec_state, submit->mmu_context, in etnaviv_gpu_submit()
1828 if (gpu->mmu_context) in etnaviv_gpu_unbind()
1829 etnaviv_iommu_context_put(gpu->mmu_context); in etnaviv_gpu_unbind()
etnaviv_gpu.h
152 struct etnaviv_iommu_context *mmu_context; member
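
Taken together, the remaining driver matches outline the lifetime of these references: a submit pins the file's context (etnaviv_gem_submit.c:532) and releases it in submit_cleanup(), etnaviv_gpu_submit() additionally pins the context that was active before the switch into prev_mmu_context (etnaviv_gpu.c:1413, etnaviv_gem.h:94), and etnaviv_hw_reset() / etnaviv_gpu_unbind() drop the GPU's own reference (etnaviv_gpu.h:152). The guard conditions around those lines are not visible in the excerpts, so the stand-alone sketch below is only a plausible reading, with invented sketch_* names and a toy counter in place of the kernel refcount.

#include <stdlib.h>

/* Toy refcounted context standing in for struct etnaviv_iommu_context. */
struct sketch_ctx {
        int refcount;
};

static struct sketch_ctx *sketch_ctx_get(struct sketch_ctx *ctx)
{
        ctx->refcount++;
        return ctx;
}

static void sketch_ctx_put(struct sketch_ctx *ctx)
{
        if (--ctx->refcount == 0)
                free(ctx);
}

struct sketch_gpu {
        struct sketch_ctx *mmu_context;         /* cf. etnaviv_gpu.h:152 */
};

struct sketch_submit {
        struct sketch_ctx *mmu_context;         /* pinned at submit time */
        struct sketch_ctx *prev_mmu_context;    /* cf. etnaviv_gem.h:94 */
};

/* Submit path: remember the previously active context so it can be released
 * once the GPU has switched over (cf. etnaviv_gpu.c:1413). */
static void sketch_gpu_submit(struct sketch_gpu *gpu, struct sketch_submit *submit)
{
        if (gpu->mmu_context)
                submit->prev_mmu_context = sketch_ctx_get(gpu->mmu_context);
        /* ...queue the command buffer against submit->mmu_context... */
}

/* Reset/unbind path: the GPU gives up its reference and forgets the context
 * (cf. etnaviv_hw_reset() and etnaviv_gpu_unbind()). */
static void sketch_hw_reset(struct sketch_gpu *gpu)
{
        if (gpu->mmu_context)
                sketch_ctx_put(gpu->mmu_context);
        gpu->mmu_context = NULL;
}

int main(void)
{
        struct sketch_ctx *ctx = calloc(1, sizeof(*ctx));
        struct sketch_gpu gpu = { 0 };
        struct sketch_submit submit = { 0 };

        ctx->refcount = 1;                        /* creator's reference */
        gpu.mmu_context = sketch_ctx_get(ctx);    /* installed by a restore path */
        submit.mmu_context = sketch_ctx_get(ctx); /* cf. etnaviv_gem_submit.c:532 */

        sketch_gpu_submit(&gpu, &submit);         /* pins the previously active context */
        sketch_hw_reset(&gpu);                    /* GPU drops its reference */

        sketch_ctx_put(submit.prev_mmu_context);  /* released after the switch */
        sketch_ctx_put(submit.mmu_context);       /* cf. submit_cleanup() */
        sketch_ctx_put(ctx);                      /* creator's reference: freed here */
        return 0;
}
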
/linux/arch/microblaze/mm/
Makefile
6 obj-y := consistent.o init.o pgtable.o mmu_context.o fault.o
/linux/arch/powerpc/mm/book3s32/
Makefile
9 obj-y += mmu.o mmu_context.o
/linux/arch/nios2/mm/
Makefile
12 obj-y += mmu_context.o
/linux/arch/powerpc/mm/nohash/
Makefile
3 obj-y += mmu_context.o tlb.o tlb_low.o kup.o
/linux/arch/powerpc/mm/
Makefile
9 init-common.o mmu_context.o drmem.o \
/linux/arch/powerpc/mm/book3s64/
Makefile
3 obj-y += mmu_context.o pgtable.o trace.o
/linux/include/asm-generic/
Kbuild
40 mandatory-y += mmu_context.h