| /linux/drivers/gpu/drm/tegra/ |
| H A D | gem.c |
    56    drm_gem_object_put(&obj->gem);  in tegra_bo_put()
    63    struct drm_gem_object *gem = &obj->gem;  in tegra_bo_pin() local
    97    map->size = gem->size;  in tegra_bo_pin()
    117   err = sg_alloc_table_from_pages(map->sgt, obj->pages, obj->num_pages, 0, gem->size,  in tegra_bo_pin()
    127   err = dma_get_sgtable(dev, map->sgt, obj->vaddr, obj->iova, gem->size);  in tegra_bo_pin()
    149   map->size = gem->size;  in tegra_bo_pin()
    221   drm_gem_object_get(&obj->gem);  in tegra_bo_get()
    250   bo->mm, bo->gem.size, PAGE_SIZE, 0, 0);  in tegra_bo_iommu_map()
    309   bo->gem.funcs = &tegra_gem_object_funcs;  in tegra_bo_alloc_object()
    314   err = drm_gem_object_init(drm, &bo->gem, size);  in tegra_bo_alloc_object()
    [all …]
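The tegra_bo_pin() hits above show the two usual ways a pinned buffer gets described by an sg_table: built from a discrete page array, or derived from one contiguous DMA allocation. A minimal sketch of that split follows; the pages/vaddr/iova parameters stand in for the driver's buffer-object fields and are assumptions for illustration, not the actual tegra code.

    #include <linux/dma-mapping.h>
    #include <linux/err.h>
    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /*
     * Illustrative only: describe a buffer either from a page array
     * (sg_alloc_table_from_pages) or from a single contiguous DMA
     * allocation (dma_get_sgtable). All parameters are assumed driver state.
     */
    static struct sg_table *example_bo_sgt(struct device *dev,
                                           struct page **pages,
                                           unsigned int num_pages,
                                           void *vaddr, dma_addr_t iova,
                                           size_t size)
    {
            struct sg_table *sgt;
            int err;

            sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
            if (!sgt)
                    return ERR_PTR(-ENOMEM);

            if (pages)
                    /* scattered pages: one entry per contiguous run */
                    err = sg_alloc_table_from_pages(sgt, pages, num_pages, 0,
                                                    size, GFP_KERNEL);
            else
                    /* contiguous allocation: let the DMA layer describe it */
                    err = dma_get_sgtable(dev, sgt, vaddr, iova, size);

            if (err) {
                    kfree(sgt);
                    return ERR_PTR(err);
            }

            return sgt;
    }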
|
| H A D | fb.c |
    122   fb->obj[i] = &planes[i]->gem;  in tegra_fb_alloc()
    141   struct drm_gem_object *gem;  in tegra_fb_create() local
    151   gem = drm_gem_object_lookup(file, cmd->handles[i]);  in tegra_fb_create()
    152   if (!gem) {  in tegra_fb_create()
    162   if (gem->size < size) {  in tegra_fb_create()
    164   drm_gem_object_put(gem);  in tegra_fb_create()
    168   planes[i] = to_tegra_bo(gem);  in tegra_fb_create()
    181   drm_gem_object_put(&planes[i]->gem);  in tegra_fb_create()
|
| H A D | drm.c |
    252   refs[num_refs++] = &obj->gem;  in tegra_drm_submit()
    259   if (offset & 3 || offset > obj->gem.size) {  in tegra_drm_submit()
    282   refs[num_refs++] = &obj->gem;  in tegra_drm_submit()
    290   reloc->cmdbuf.offset >= obj->gem.size) {  in tegra_drm_submit()
    296   refs[num_refs++] = &obj->gem;  in tegra_drm_submit()
    298   if (reloc->target.offset >= obj->gem.size) {  in tegra_drm_submit()
    368   struct drm_gem_object *gem;  in tegra_gem_mmap() local
    371   gem = drm_gem_object_lookup(file, args->handle);  in tegra_gem_mmap()
    372   if (!gem)  in tegra_gem_mmap()
    375   bo = to_tegra_bo(gem);  in tegra_gem_mmap()
    [all …]
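tegra_fb_create() and tegra_gem_mmap() above both follow the standard GEM handle-resolution pattern: drm_gem_object_lookup() returns the object with a reference held, the driver downcasts and validates it, and drm_gem_object_put() drops the reference on every exit path. A hedged sketch of that pattern; the helper name and the size check are invented for illustration.

    #include <drm/drm_gem.h>

    /*
     * Hypothetical ioctl helper: resolve a userspace handle, check the
     * object is large enough, and drop the lookup reference either way.
     */
    static int example_validate_handle(struct drm_file *file, u32 handle,
                                       size_t min_size)
    {
            struct drm_gem_object *gem;
            int ret = 0;

            gem = drm_gem_object_lookup(file, handle); /* takes a reference */
            if (!gem)
                    return -ENOENT;

            if (gem->size < min_size)
                    ret = -EINVAL;

            drm_gem_object_put(gem); /* always balance the lookup */
            return ret;
    }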
|
| H A D | fbdev.c |
    33    err = drm_gem_mmap_obj(&bo->gem, bo->gem.size, vma);  in tegra_fb_mmap()
    37    return __tegra_gem_mmap(&bo->gem, vma);  in tegra_fb_mmap()
    107   drm_gem_object_put(&bo->gem);  in tegra_fbdev_driver_fbdev_probe()
|
| /linux/drivers/gpu/drm/ |
| H A D | drm_client.c |
    187   struct drm_gem_object *gem;  in drm_client_buffer_delete() local
    193   gem = buffer->fb->obj[0];  in drm_client_buffer_delete()
    194   drm_gem_vunmap(gem, &buffer->map);  in drm_client_buffer_delete()
    201   drm_gem_object_put(buffer->gem);  in drm_client_buffer_delete()
    255   buffer->gem = obj;  in drm_client_buffer_create()
    292   struct drm_gem_object *gem = buffer->fb->obj[0];  in drm_client_buffer_vmap_local() local
    296   drm_gem_lock(gem);  in drm_client_buffer_vmap_local()
    298   ret = drm_gem_vmap_locked(gem, map);  in drm_client_buffer_vmap_local()
    306   drm_gem_unlock(gem);  in drm_client_buffer_vmap_local()
    321   struct drm_gem_object *gem = buffer->fb->obj[0];  in drm_client_buffer_vunmap_local() local
    [all …]
|
| H A D | drm_gpuvm.c |
    1323  struct drm_gem_object *obj = va->gem.obj;  in drm_gpuvm_prepare_range()
    1595  INIT_LIST_HEAD(&vm_bo->list.entry.gem);  in drm_gpuvm_bo_create()
    1656  list_del(&vm_bo->list.entry.gem);  in drm_gpuvm_bo_destroy()
    1708  list_del(&vm_bo->list.entry.gem);  in drm_gpuvm_bo_into_zombie()
    1876  list_add_tail(&vm_bo->list.entry.gem, &obj->gpuva.list);  in drm_gpuvm_bo_obtain_locked()
    1919  list_add_tail(&__vm_bo->list.entry.gem, &obj->gpuva.list);  in drm_gpuvm_bo_obtain_prealloc()
    2096  struct drm_gem_object *obj = va->gem.obj;  in drm_gpuva_link()
    2107  list_add_tail(&va->gem.entry, &vm_bo->list.gpuva);  in drm_gpuva_link()
    2131  struct drm_gem_object *obj = va->gem.obj;  in drm_gpuva_unlink()
    2138  list_del_init(&va->gem.entry);  in drm_gpuva_unlink()
    [all …]
|
| H A D | drm_gem_vram_helper.c |
    186   struct drm_gem_object *gem;  in drm_gem_vram_create() local
    195   gem = dev->driver->gem_create_object(dev, size);  in drm_gem_vram_create()
    196   if (IS_ERR(gem))  in drm_gem_vram_create()
    197   return ERR_CAST(gem);  in drm_gem_vram_create()
    198   gbo = drm_gem_vram_of_gem(gem);  in drm_gem_vram_create()
    203   gem = &gbo->bo.base;  in drm_gem_vram_create()
    206   if (!gem->funcs)  in drm_gem_vram_create()
    207   gem->funcs = &drm_gem_vram_object_funcs;  in drm_gem_vram_create()
    209   ret = drm_gem_object_init(dev, gem, size);  in drm_gem_vram_create()
    520   static void drm_gem_vram_object_free(struct drm_gem_object *gem)  in drm_gem_vram_object_free() argument
    [all …]
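drm_gem_vram_create() above follows the generic allocation flow: obtain the driver object (optionally via the gem_create_object hook), install a default funcs table if the driver did not set one, then register the object with drm_gem_object_init(). A simplified sketch of that flow for a hypothetical driver object; my_bo, my_gem_object_funcs and the kzalloc-based allocation are assumptions, not the VRAM helper's actual code.

    #include <linux/err.h>
    #include <linux/slab.h>
    #include <drm/drm_gem.h>

    struct my_bo {
            struct drm_gem_object gem;   /* embedded base object */
            /* driver-private fields would follow */
    };

    extern const struct drm_gem_object_funcs my_gem_object_funcs;

    /* Allocate a driver BO and initialize its embedded GEM object. */
    static struct my_bo *example_bo_create(struct drm_device *dev, size_t size)
    {
            struct my_bo *bo;
            int err;

            bo = kzalloc(sizeof(*bo), GFP_KERNEL);
            if (!bo)
                    return ERR_PTR(-ENOMEM);

            bo->gem.funcs = &my_gem_object_funcs;

            /* size is expected to be page-aligned by the GEM core */
            err = drm_gem_object_init(dev, &bo->gem, size);
            if (err) {
                    kfree(bo);
                    return ERR_PTR(err);
            }

            return bo;
    }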
|
| /linux/drivers/gpu/drm/nouveau/ |
| H A D | nouveau_gem.c |
    77    nouveau_gem_object_del(struct drm_gem_object *gem)  in nouveau_gem_object_del() argument
    79    struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_object_del()
    97    nouveau_gem_object_open(struct drm_gem_object *gem, struct drm_file *file_priv)  in nouveau_gem_object_open() argument
    100   struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_object_open()
    183   nouveau_gem_object_close(struct drm_gem_object *gem, struct drm_file *file_priv)  in nouveau_gem_object_close() argument
    186   struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_object_close()
    299   nouveau_gem_info(struct drm_file *file_priv, struct drm_gem_object *gem,  in nouveau_gem_info() argument
    303   struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_info()
    372   nouveau_gem_set_domain(struct drm_gem_object *gem, uint32_t read_domains,  in nouveau_gem_set_domain() argument
    375   struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_set_domain()
    [all …]
|
| H A D | nouveau_gem.h |
    11    nouveau_gem_object(struct drm_gem_object *gem)  in nouveau_gem_object() argument
    13    return gem ? container_of(gem, struct nouveau_bo, bo.base) : NULL;  in nouveau_gem_object()
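nouveau_gem_object() above, like to_ttm_bo()/gem_to_lsdc_bo() in the Loongson entry further down, is the usual container_of() downcast: the driver buffer object embeds a struct drm_gem_object, and the pointer handed around by the GEM core is converted back to the containing type. A sketch with an invented my_bo type:

    #include <linux/container_of.h>
    #include <drm/drm_gem.h>

    /* Hypothetical driver BO; the name and extra field are illustrative. */
    struct my_bo {
            struct drm_gem_object gem;   /* embedded base object */
            void *priv;
    };

    static inline struct my_bo *to_my_bo(struct drm_gem_object *gem)
    {
            /* Recover the containing driver object; tolerate NULL the
             * same way nouveau_gem_object() does. */
            return gem ? container_of(gem, struct my_bo, gem) : NULL;
    }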
|
| H A D | nouveau_ttm.c |
    192   drm->gem.vram_available >> PAGE_SHIFT);  in nouveau_ttm_init_vram()
    198   drm->gem.vram_available >> PAGE_SHIFT);  in nouveau_ttm_init_vram()
    221   unsigned long size_pages = drm->gem.gart_available >> PAGE_SHIFT;  in nouveau_ttm_init_gtt()
    315   drm->gem.vram_available = drm->client.device.info.ram_user;  in nouveau_ttm_init()
    331   drm->gem.gart_available = drm->client.vmm.vmm.limit;  in nouveau_ttm_init()
    333   drm->gem.gart_available = drm->agp.size;  in nouveau_ttm_init()
    345   NV_INFO(drm, "VRAM: %d MiB\n", (u32)(drm->gem.vram_available >> 20));  in nouveau_ttm_init()
    346   NV_INFO(drm, "GART: %d MiB\n", (u32)(drm->gem.gart_available >> 20));  in nouveau_ttm_init()
|
| H A D | nouveau_uvmm.c |
    76    } gem;  member
    195   u64 offset = uvma->va.gem.offset;  in nouveau_uvma_map()
    237   drm_gem_object_get(uvma->va.gem.obj);  in nouveau_uvma_gem_get()
    243   drm_gem_object_put(uvma->va.gem.obj);  in nouveau_uvma_gem_put()
    464   (op->gem.offset & non_page_bits) == 0;  in op_map_aligned_to_page_shift()
    470   struct nouveau_bo *nvbo = nouveau_gem_object(op->gem.obj);  in select_page_shift()
    508   drm_WARN_ONCE(op->gem.obj->dev, 1, "Could not find an appropriate page size.\n");  in select_page_shift()
    829   return op->map.gem.obj;  in op_gem_obj()
    835   return op->remap.unmap->va->gem.obj;  in op_gem_obj()
    837   return op->unmap.va->gem.obj;  in op_gem_obj()
    [all …]
|
| H A D | nouveau_display.c |
    259   struct drm_gem_object *gem,  in nouveau_framebuffer_new() argument
    263   struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_framebuffer_new()
    323   fb->obj[0] = gem;  in nouveau_framebuffer_new()
    338   struct drm_gem_object *gem;  in nouveau_user_framebuffer_create() local
    341   gem = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);  in nouveau_user_framebuffer_create()
    342   if (!gem)  in nouveau_user_framebuffer_create()
    345   ret = nouveau_framebuffer_new(dev, info, mode_cmd, gem, &fb);  in nouveau_user_framebuffer_create()
    349   drm_gem_object_put(gem);  in nouveau_user_framebuffer_create()
|
| /linux/drivers/net/ethernet/sun/ |
| H A D | sungem.c |
    117   static u16 __sungem_phy_read(struct gem *gp, int phy_addr, int reg)  in __sungem_phy_read()
    145   struct gem *gp = netdev_priv(dev);  in _sungem_phy_read()
    149   static inline u16 sungem_phy_read(struct gem *gp, int reg)  in sungem_phy_read()
    154   static void __sungem_phy_write(struct gem *gp, int phy_addr, int reg, u16 val)  in __sungem_phy_write()
    178   struct gem *gp = netdev_priv(dev);  in _sungem_phy_write()
    182   static inline void sungem_phy_write(struct gem *gp, int reg, u16 val)  in sungem_phy_write()
    187   static inline void gem_enable_ints(struct gem *gp)  in gem_enable_ints()
    193   static inline void gem_disable_ints(struct gem *gp)  in gem_disable_ints()
    200   static void gem_get_cell(struct gem *gp)  in gem_get_cell()
    214   static void gem_put_cell(struct gem *gp)  in gem_put_cell()
    [all …]
|
| /linux/include/drm/ |
| H A D | drm_gem_ttm_helper.h |
    18    const struct drm_gem_object *gem);
    19    int drm_gem_ttm_vmap(struct drm_gem_object *gem,
    21    void drm_gem_ttm_vunmap(struct drm_gem_object *gem,
    23    int drm_gem_ttm_mmap(struct drm_gem_object *gem,
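These prototypes are TTM-backed implementations of the generic GEM object callbacks (print_info, vmap, vunmap, mmap), so a TTM-based driver can typically route its drm_gem_object_funcs straight at them. A sketch of such a table, assuming a driver-specific free callback; this mirrors how several TTM drivers wire things up rather than quoting any one of them.

    #include <drm/drm_gem.h>
    #include <drm/drm_gem_ttm_helper.h>

    /* Assumed driver-specific teardown. */
    static void my_gem_free(struct drm_gem_object *gem)
    {
            /* release backing storage, then the driver object itself */
    }

    /* Route the generic GEM callbacks at the TTM helpers. */
    static const struct drm_gem_object_funcs my_gem_object_funcs = {
            .free       = my_gem_free,
            .print_info = drm_gem_ttm_print_info,
            .vmap       = drm_gem_ttm_vmap,
            .vunmap     = drm_gem_ttm_vunmap,
            .mmap       = drm_gem_ttm_mmap,
    };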
|
| /linux/drivers/gpu/drm/loongson/ |
| H A D | lsdc_ttm.h |
    47    static inline struct ttm_buffer_object *to_ttm_bo(struct drm_gem_object *gem)  in to_ttm_bo() argument
    49    return container_of(gem, struct ttm_buffer_object, base);  in to_ttm_bo()
    57    static inline struct lsdc_bo *gem_to_lsdc_bo(struct drm_gem_object *gem)  in gem_to_lsdc_bo() argument
    59    return container_of(gem, struct lsdc_bo, tbo.base);  in gem_to_lsdc_bo()
|
| /linux/drivers/gpu/drm/msm/ |
| H A D | msm_gem_vma.c |
    330   .offset = vma->gem.offset,  in msm_gem_vma_map()
    354   if (vma->gem.obj)  in msm_gem_vma_close()
    355   msm_gem_assert_locked(vma->gem.obj);  in msm_gem_vma_close()
    402   .gem.obj = obj,  in msm_gem_vma_new()
    403   .gem.offset = offset,  in msm_gem_vma_new()
    484   return msm_gem_vma_new(arg->job->vm, op->gem.obj, op->gem.offset,  in vma_from_op()
    493   struct drm_gem_object *obj = op->map.gem.obj;  in msm_gem_vm_sm_step_map()
    506   vm_dbg("%p:%p:%p: %016llx %016llx", vma->vm, vma, vma->gem.obj,  in msm_gem_vm_sm_step_map()
    523   .offset = vma->gem.offset,  in msm_gem_vm_sm_step_map()
    527   .obj = vma->gem.obj,  in msm_gem_vm_sm_step_map()
    [all …]
|
| /linux/drivers/gpu/drm/radeon/ |
| H A D | radeon_prime.c |
    63    mutex_lock(&rdev->gem.mutex);  in radeon_gem_prime_import_sg_table()
    64    list_add_tail(&bo->list, &rdev->gem.objects);  in radeon_gem_prime_import_sg_table()
    65    mutex_unlock(&rdev->gem.mutex);  in radeon_gem_prime_import_sg_table()
|
| /linux/drivers/gpu/drm/xe/ |
| H A D | xe_mmio_gem.c |
    115   u64 xe_mmio_gem_mmap_offset(struct xe_mmio_gem *gem)  in xe_mmio_gem_mmap_offset() argument
    117   return drm_vma_node_offset_addr(&gem->base.vma_node);  in xe_mmio_gem_mmap_offset()
    137   void xe_mmio_gem_destroy(struct xe_mmio_gem *gem)  in xe_mmio_gem_destroy() argument
    139   xe_mmio_gem_free(&gem->base);  in xe_mmio_gem_destroy()
|
| /linux/drivers/gpu/drm/renesas/rcar-du/ |
| H A D | rcar_du_vsp.c |
    266   struct drm_gem_dma_object *gem = drm_fb_dma_get_gem_obj(fb, i);  in rcar_du_vsp_map_fb() local
    269   if (gem->sgt) {  in rcar_du_vsp_map_fb()
    280   ret = sg_alloc_table(sgt, gem->sgt->orig_nents,  in rcar_du_vsp_map_fb()
    285   src = gem->sgt->sgl;  in rcar_du_vsp_map_fb()
    287   for (j = 0; j < gem->sgt->orig_nents; ++j) {  in rcar_du_vsp_map_fb()
    294   ret = dma_get_sgtable(rcdu->dev, sgt, gem->vaddr,  in rcar_du_vsp_map_fb()
    295   gem->dma_addr, gem->base.size);  in rcar_du_vsp_map_fb()
|
| /linux/drivers/gpu/drm/qxl/ |
| H A D | qxl_object.c |
    42    mutex_lock(&qdev->gem.mutex);  in qxl_ttm_bo_destroy()
    44    mutex_unlock(&qdev->gem.mutex);  in qxl_ttm_bo_destroy()
    362   if (list_empty(&qdev->gem.objects))  in qxl_bo_force_delete()
    365   list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) {  in qxl_bo_force_delete()
    369   mutex_lock(&qdev->gem.mutex);  in qxl_bo_force_delete()
    371   mutex_unlock(&qdev->gem.mutex);  in qxl_bo_force_delete()
|
| /linux/drivers/gpu/drm/imagination/ |
| H A D | pvr_vm.c |
    193   .map.gem.obj = gem_from_pvr_gem(bind_op->pvr_obj),  in pvr_vm_bind_op_exec()
    194   .map.gem.offset = bind_op->offset,  in pvr_vm_bind_op_exec()
    356   struct pvr_gem_object *pvr_gem = gem_to_pvr_gem(op->map.gem.obj);  in pvr_vm_gpuva_map()
    360   if ((op->map.gem.offset | op->map.va.range) & ~PVR_DEVICE_PAGE_MASK)  in pvr_vm_gpuva_map()
    436   pvr_gem_object_get(gem_to_pvr_gem(ctx->prev_va->base.gem.obj));  in pvr_vm_gpuva_remap()
    442   pvr_gem_object_get(gem_to_pvr_gem(ctx->next_va->base.gem.obj));  in pvr_vm_gpuva_remap()
    866   pvr_obj = gem_to_pvr_gem(va->gem.obj);  in pvr_vm_unmap()
    900   pvr_obj = gem_to_pvr_gem(va->gem.obj);  in pvr_vm_unmap_all()
    1164  pvr_obj = gem_to_pvr_gem(va->gem.obj);  in pvr_vm_find_gem_object()
    1168  *mapped_offset_out = va->gem.offset;  in pvr_vm_find_gem_object()
|
| /linux/drivers/accel/ethosu/ |
| H A D | ethosu_job.c |
    411   struct drm_gem_object *gem;  in ethosu_ioctl_submit_job() local
    426   gem = drm_gem_object_lookup(file, job->region_bo_handles[i]);  in ethosu_ioctl_submit_job()
    427   if (!gem) {  in ethosu_ioctl_submit_job()
    435   ejob->region_bo[ejob->region_cnt] = gem;  in ethosu_ioctl_submit_job()
    439   if (to_ethosu_bo(gem)->info) {  in ethosu_ioctl_submit_job()
    448   if (cmd_info->region_size[i] > gem->size) {  in ethosu_ioctl_submit_job()
    451   i, cmd_info->region_size[i], gem->size);  in ethosu_ioctl_submit_job()
|
| /linux/drivers/gpu/drm/aspeed/ |
| H A D | aspeed_gfx_crtc.c |
    170   struct drm_gem_dma_object *gem;  in aspeed_gfx_pipe_update() local
    187   gem = drm_fb_dma_get_gem_obj(fb, 0);  in aspeed_gfx_pipe_update()
    188   if (!gem)  in aspeed_gfx_pipe_update()
    190   writel(gem->dma_addr, priv->base + CRT_ADDR);  in aspeed_gfx_pipe_update()
|
| /linux/drivers/gpu/drm/i915/pxp/ |
| H A D | intel_pxp.c |
    500   spin_lock_irq(&i915->gem.contexts.lock);  in intel_pxp_invalidate()
    501   list_for_each_entry_safe(ctx, cn, &i915->gem.contexts.list, link) {  in intel_pxp_invalidate()
    513   spin_unlock_irq(&i915->gem.contexts.lock);  in intel_pxp_invalidate()
    539   spin_lock_irq(&i915->gem.contexts.lock);  in intel_pxp_invalidate()
    543   spin_unlock_irq(&i915->gem.contexts.lock);  in intel_pxp_invalidate()
|
| /linux/drivers/gpu/drm/renesas/shmobile/ |
| H A D | shmob_drm_plane.c |
    50    struct drm_gem_dma_object *gem;  in shmob_drm_plane_compute_base() local
    54    gem = drm_fb_dma_get_gem_obj(fb, 0);  in shmob_drm_plane_compute_base()
    55    sstate->dma[0] = gem->dma_addr + fb->offsets[0]  in shmob_drm_plane_compute_base()
    60    gem = drm_fb_dma_get_gem_obj(fb, 1);  in shmob_drm_plane_compute_base()
    61    sstate->dma[1] = gem->dma_addr + fb->offsets[1]  in shmob_drm_plane_compute_base()
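Both the aspeed and shmobile hits fetch the DMA-backed GEM object behind a framebuffer plane and derive a scanout address from it. A sketch of that calculation, assuming an (x, y) crop origin as in the usual DRM framebuffer layout; real drivers additionally account for format-specific subsampling of the chroma planes.

    #include <drm/drm_fb_dma_helper.h>
    #include <drm/drm_framebuffer.h>
    #include <drm/drm_gem_dma_helper.h>

    /*
     * Illustrative only: DMA address of pixel (x, y) in plane "plane"
     * of framebuffer "fb", using the plane's offset, pitch, and cpp.
     */
    static dma_addr_t example_plane_base(struct drm_framebuffer *fb,
                                         unsigned int plane,
                                         unsigned int x, unsigned int y)
    {
            struct drm_gem_dma_object *gem = drm_fb_dma_get_gem_obj(fb, plane);

            return gem->dma_addr + fb->offsets[plane] +
                   y * fb->pitches[plane] +
                   x * fb->format->cpp[plane];
    }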
|