Lines Matching +full:scatter +full:- +full:gather

1 // SPDX-License-Identifier: GPL-2.0-or-later
12 #include <linux/dma-buf.h>
13 #include <linux/dma-mapping.h>
31 * for devices that do not support scatter-gather DMA (either directly or
35 * the buffer objects are allocated using a traditional page-based
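The matched lines above are from the overview comment of drivers/gpu/drm/drm_gem_dma_helper.c: the helpers back every GEM object with one DMA-contiguous allocation so that devices without scatter-gather support (or with only an IOMMU in front of the bus) can address it. A minimal sketch of how a driver typically pulls the helpers in follows; the foo_* names are hypothetical, and it assumes the DEFINE_DRM_GEM_DMA_FOPS and DRM_GEM_DMA_DRIVER_OPS convenience macros from <drm/drm_gem_dma_helper.h>.

#include <drm/drm_drv.h>
#include <drm/drm_gem_dma_helper.h>

/* Default file_operations, including .mmap (and .get_unmapped_area on !MMU). */
DEFINE_DRM_GEM_DMA_FOPS(foo_fops);

static const struct drm_driver foo_driver = {
	.driver_features = DRIVER_GEM | DRIVER_MODESET | DRIVER_ATOMIC,
	/* dumb_create, PRIME import and GEM object hooks from the helpers */
	DRM_GEM_DMA_DRIVER_OPS,
	.fops  = &foo_fops,
	.name  = "foo",
	.desc  = "Hypothetical DMA GEM helper user",
	.major = 1,
	.minor = 0,
};

DRM_GEM_DMA_DRIVER_OPS installs drm_gem_dma_dumb_create() and drm_gem_dma_prime_import_sg_table(), both of which appear in the matched lines below.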
58 * __drm_gem_dma_create - Create a GEM DMA object without allocating memory
67 * A struct drm_gem_dma_object * on success or an ERR_PTR()-encoded negative
77 if (drm->driver->gem_create_object) { in __drm_gem_dma_create()
78 gem_obj = drm->driver->gem_create_object(drm, size); in __drm_gem_dma_create()
85 return ERR_PTR(-ENOMEM); in __drm_gem_dma_create()
86 gem_obj = &dma_obj->base; in __drm_gem_dma_create()
89 if (!gem_obj->funcs) in __drm_gem_dma_create()
90 gem_obj->funcs = &drm_gem_dma_default_funcs; in __drm_gem_dma_create()
95 /* Always use writecombine for dma-buf mappings */ in __drm_gem_dma_create()
96 dma_obj->map_noncoherent = false; in __drm_gem_dma_create()
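These lines are from __drm_gem_dma_create(): when the driver provides a &drm_driver.gem_create_object hook the helper calls it (so the driver can embed struct drm_gem_dma_object in a larger object), otherwise it kzalloc()s a bare one; object funcs are only installed when the driver left them NULL, and imported dma-bufs are forced back to writecombine mappings. Below is a sketch of such a hook, assuming <drm/drm_gem_dma_helper.h> and <linux/slab.h> are included and that gem_create_object reports failure via ERR_PTR() as current kernels expect; struct foo_bo and its cached flag are invented for illustration.

struct foo_bo {
	struct drm_gem_dma_object base;
	bool cached;			/* hypothetical driver state */
};

static struct drm_gem_object *foo_gem_create_object(struct drm_device *drm,
						    size_t size)
{
	struct foo_bo *bo;

	bo = kzalloc(sizeof(*bo), GFP_KERNEL);
	if (!bo)
		return ERR_PTR(-ENOMEM);

	/*
	 * Ask for cached, non-coherent mappings; the helper forces this back
	 * to false for imported dma-bufs, as the lines above show.
	 */
	bo->base.map_noncoherent = true;
	bo->cached = true;

	/* Leave base.base.funcs NULL so the helper installs its defaults. */
	return &bo->base.base;
}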
117 * drm_gem_dma_create - allocate an object with the given size
131 * A struct drm_gem_dma_object * on success or an ERR_PTR()-encoded negative
146 if (dma_obj->map_noncoherent) { in drm_gem_dma_create()
147 dma_obj->vaddr = dma_alloc_noncoherent(drm->dev, size, in drm_gem_dma_create()
148 &dma_obj->dma_addr, in drm_gem_dma_create()
152 dma_obj->vaddr = dma_alloc_wc(drm->dev, size, in drm_gem_dma_create()
153 &dma_obj->dma_addr, in drm_gem_dma_create()
156 if (!dma_obj->vaddr) { in drm_gem_dma_create()
159 ret = -ENOMEM; in drm_gem_dma_create()
166 drm_gem_object_put(&dma_obj->base); in drm_gem_dma_create()
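drm_gem_dma_create() then allocates the backing store itself, with dma_alloc_wc() by default or dma_alloc_noncoherent() when map_noncoherent was requested, and drops the object reference again if the allocation fails (the line above). A hedged usage sketch, where foo_alloc_buffer() is an invented helper and <drm/drm_gem_dma_helper.h> is assumed included:

static int foo_alloc_buffer(struct drm_device *drm, size_t size)
{
	struct drm_gem_dma_object *dma_obj;

	dma_obj = drm_gem_dma_create(drm, size);
	if (IS_ERR(dma_obj))
		return PTR_ERR(dma_obj);

	drm_info(drm, "buffer at %pad, kernel address %p\n",
		 &dma_obj->dma_addr, dma_obj->vaddr);

	/* Drop the allocation reference once the driver is done with it. */
	drm_gem_object_put(&dma_obj->base);

	return 0;
}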
172 * drm_gem_dma_create_with_handle - allocate an object with the given size and
174 * @file_priv: DRM file-private structure to register the handle for
187 * A struct drm_gem_dma_object * on success or an ERR_PTR()-encoded negative
203 gem_obj = &dma_obj->base; in drm_gem_dma_create_with_handle()
210 /* drop reference from allocate - handle holds it now. */ in drm_gem_dma_create_with_handle()
219 * drm_gem_dma_free - free resources associated with a DMA GEM object
228 struct drm_gem_object *gem_obj = &dma_obj->base; in drm_gem_dma_free()
229 struct iosys_map map = IOSYS_MAP_INIT_VADDR(dma_obj->vaddr); in drm_gem_dma_free()
231 if (gem_obj->import_attach) { in drm_gem_dma_free()
232 if (dma_obj->vaddr) in drm_gem_dma_free()
233 dma_buf_vunmap_unlocked(gem_obj->import_attach->dmabuf, &map); in drm_gem_dma_free()
234 drm_prime_gem_destroy(gem_obj, dma_obj->sgt); in drm_gem_dma_free()
235 } else if (dma_obj->vaddr) { in drm_gem_dma_free()
236 if (dma_obj->map_noncoherent) in drm_gem_dma_free()
237 dma_free_noncoherent(gem_obj->dev->dev, dma_obj->base.size, in drm_gem_dma_free()
238 dma_obj->vaddr, dma_obj->dma_addr, in drm_gem_dma_free()
241 dma_free_wc(gem_obj->dev->dev, dma_obj->base.size, in drm_gem_dma_free()
242 dma_obj->vaddr, dma_obj->dma_addr); in drm_gem_dma_free()
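drm_gem_dma_free() tears the object down along both paths seen above: imported objects get the dma-buf vunmapped and the PRIME import destroyed, while locally allocated ones have their noncoherent or writecombined memory freed. Drivers usually reach it through the drm_gem_object_funcs wrappers; the table below mirrors what the helper installs by default and is only needed when a driver wants its own funcs (for example to wrap .mmap), foo_gem_funcs being a hypothetical name.

static const struct drm_gem_object_funcs foo_gem_funcs = {
	.free = drm_gem_dma_object_free,
	.print_info = drm_gem_dma_object_print_info,
	.get_sg_table = drm_gem_dma_object_get_sg_table,
	.vmap = drm_gem_dma_object_vmap,
	.mmap = drm_gem_dma_object_mmap,
	.vm_ops = &drm_gem_dma_vm_ops,
};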
252 * drm_gem_dma_dumb_create_internal - create a dumb buffer object
253 * @file_priv: DRM file-private structure to create the dumb buffer for
269 unsigned int min_pitch = DIV_ROUND_UP(args->width * args->bpp, 8); in drm_gem_dma_dumb_create_internal()
272 if (args->pitch < min_pitch) in drm_gem_dma_dumb_create_internal()
273 args->pitch = min_pitch; in drm_gem_dma_dumb_create_internal()
275 if (args->size < args->pitch * args->height) in drm_gem_dma_dumb_create_internal()
276 args->size = args->pitch * args->height; in drm_gem_dma_dumb_create_internal()
278 dma_obj = drm_gem_dma_create_with_handle(file_priv, drm, args->size, in drm_gem_dma_dumb_create_internal()
279 &args->handle); in drm_gem_dma_dumb_create_internal()
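drm_gem_dma_dumb_create_internal() only enforces lower bounds: min_pitch is DIV_ROUND_UP(width * bpp, 8), and args->pitch and args->size are bumped when the caller passed smaller values. For a 1920-pixel-wide, 32 bpp dumb buffer that minimum pitch is 1920 * 32 / 8 = 7680 bytes. A driver with hardware alignment constraints computes its pitch first and then delegates, roughly as in this sketch (foo_dumb_create and the 64-byte alignment are illustrative assumptions):

static int foo_dumb_create(struct drm_file *file_priv, struct drm_device *drm,
			   struct drm_mode_create_dumb *args)
{
	unsigned int min_pitch = DIV_ROUND_UP(args->width * args->bpp, 8);

	/* Assume the scanout engine wants a 64-byte aligned pitch. */
	args->pitch = ALIGN(min_pitch, 64);

	return drm_gem_dma_dumb_create_internal(file_priv, drm, args);
}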
285 * drm_gem_dma_dumb_create - create a dumb buffer object
286 * @file_priv: DRM file-private structure to create the dumb buffer for
308 args->pitch = DIV_ROUND_UP(args->width * args->bpp, 8); in drm_gem_dma_dumb_create()
309 args->size = args->pitch * args->height; in drm_gem_dma_dumb_create()
311 dma_obj = drm_gem_dma_create_with_handle(file_priv, drm, args->size, in drm_gem_dma_dumb_create()
312 &args->handle); in drm_gem_dma_dumb_create()
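drm_gem_dma_dumb_create() is the variant for hardware without pitch or size restrictions. A driver with restrictions plugs in its own callback instead, such as the hypothetical foo_dumb_create() above; assuming the DRM_GEM_DMA_DRIVER_OPS_WITH_DUMB_CREATE macro from the helper header, the wiring looks like:

static const struct drm_driver foo_constrained_driver = {
	.driver_features = DRIVER_GEM | DRIVER_MODESET | DRIVER_ATOMIC,
	DRM_GEM_DMA_DRIVER_OPS_WITH_DUMB_CREATE(foo_dumb_create),
	.fops  = &foo_fops,
	.name  = "foo",
	.desc  = "Hypothetical driver with a custom dumb_create",
	.major = 1,
	.minor = 0,
};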
325 * drm_gem_dma_get_unmapped_area - propose address for mapping in noMMU cases
348 struct drm_file *priv = filp->private_data; in drm_gem_dma_get_unmapped_area()
349 struct drm_device *dev = priv->minor->dev; in drm_gem_dma_get_unmapped_area()
353 return -ENODEV; in drm_gem_dma_get_unmapped_area()
355 drm_vma_offset_lock_lookup(dev->vma_offset_manager); in drm_gem_dma_get_unmapped_area()
356 node = drm_vma_offset_exact_lookup_locked(dev->vma_offset_manager, in drm_gem_dma_get_unmapped_area()
362 * When the object is being freed, after it hits 0-refcnt it in drm_gem_dma_get_unmapped_area()
365 * mgr->vm_lock. Therefore if we find an object with a 0-refcnt in drm_gem_dma_get_unmapped_area()
367 * destroyed and will be freed as soon as we release the lock - in drm_gem_dma_get_unmapped_area()
368 * so we have to check for the 0-refcnted object and treat it as in drm_gem_dma_get_unmapped_area()
371 if (!kref_get_unless_zero(&obj->refcount)) in drm_gem_dma_get_unmapped_area()
375 drm_vma_offset_unlock_lookup(dev->vma_offset_manager); in drm_gem_dma_get_unmapped_area()
378 return -EINVAL; in drm_gem_dma_get_unmapped_area()
382 return -EACCES; in drm_gem_dma_get_unmapped_area()
389 return dma_obj->vaddr ? (unsigned long)dma_obj->vaddr : -EINVAL; in drm_gem_dma_get_unmapped_area()
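On !MMU systems the kernel has to propose the mapping address itself, and drm_gem_dma_get_unmapped_area() answers with the buffer's kernel virtual address after looking the object up by its fake mmap offset (with the refcount care described in the comment above). DEFINE_DRM_GEM_DMA_FOPS() is expected to wire this up already; a driver building its own file_operations would hook it in roughly as in this sketch, valid only without CONFIG_MMU:

#ifndef CONFIG_MMU
static const struct file_operations foo_nommu_fops = {
	.owner		= THIS_MODULE,
	.open		= drm_open,
	.release	= drm_release,
	.unlocked_ioctl	= drm_ioctl,
	.mmap		= drm_gem_mmap,
	.poll		= drm_poll,
	.read		= drm_read,
	.get_unmapped_area = drm_gem_dma_get_unmapped_area,
};
#endif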
395 * drm_gem_dma_print_info() - Print &drm_gem_dma_object info for debugfs
405 drm_printf_indent(p, indent, "dma_addr=%pad\n", &dma_obj->dma_addr); in drm_gem_dma_print_info()
406 drm_printf_indent(p, indent, "vaddr=%p\n", dma_obj->vaddr); in drm_gem_dma_print_info()
411 * drm_gem_dma_get_sg_table - provide a scatter/gather table of pinned
415 * This function exports a scatter/gather table by calling the standard
419 * A pointer to the scatter/gather table of pinned pages or an ERR_PTR()-encoded negative error code on failure.
423 struct drm_gem_object *obj = &dma_obj->base; in drm_gem_dma_get_sg_table()
429 return ERR_PTR(-ENOMEM); in drm_gem_dma_get_sg_table()
431 ret = dma_get_sgtable(obj->dev->dev, sgt, dma_obj->vaddr, in drm_gem_dma_get_sg_table()
432 dma_obj->dma_addr, obj->size); in drm_gem_dma_get_sg_table()
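drm_gem_dma_get_sg_table() builds the table with dma_get_sgtable(), so for a contiguous DMA buffer it typically ends up with a single entry. It is normally consumed through the .get_sg_table object callback by the PRIME export path; a driver that calls it directly owns the table and must release it, as in this sketch (foo_inspect_pages() is invented):

static int foo_inspect_pages(struct drm_gem_dma_object *dma_obj)
{
	struct sg_table *sgt;

	sgt = drm_gem_dma_get_sg_table(dma_obj);
	if (IS_ERR(sgt))
		return PTR_ERR(sgt);

	/* ... walk the (usually single-entry) table with for_each_sgtable_sg() ... */

	sg_free_table(sgt);
	kfree(sgt);

	return 0;
}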
445 * drm_gem_dma_prime_import_sg_table - produce a DMA GEM object from another
446 * driver's scatter/gather table of pinned pages
448 * @attach: DMA-BUF attachment
449 * @sgt: scatter/gather table of pinned pages
451 * This function imports a scatter/gather table exported via DMA-BUF by
453 * (i.e. the scatter/gather table must contain a single entry). Drivers that
458 * A pointer to a newly created GEM object or an ERR_PTR-encoded negative
469 if (drm_prime_get_contiguous_size(sgt) < attach->dmabuf->size) in drm_gem_dma_prime_import_sg_table()
470 return ERR_PTR(-EINVAL); in drm_gem_dma_prime_import_sg_table()
473 dma_obj = __drm_gem_dma_create(dev, attach->dmabuf->size, true); in drm_gem_dma_prime_import_sg_table()
477 dma_obj->dma_addr = sg_dma_address(sgt->sgl); in drm_gem_dma_prime_import_sg_table()
478 dma_obj->sgt = sgt; in drm_gem_dma_prime_import_sg_table()
480 drm_dbg_prime(dev, "dma_addr = %pad, size = %zu\n", &dma_obj->dma_addr, in drm_gem_dma_prime_import_sg_table()
481 attach->dmabuf->size); in drm_gem_dma_prime_import_sg_table()
483 return &dma_obj->base; in drm_gem_dma_prime_import_sg_table()
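Import only succeeds when the buffer is contiguous for the importing device (the drm_prime_get_contiguous_size() check above), and the new object simply reuses the first DMA address of the table. DRM_GEM_DMA_DRIVER_OPS sets this helper as the &drm_driver.gem_prime_import_sg_table callback; a driver that wants extra bookkeeping can wrap it, roughly like this hypothetical foo_gem_prime_import_sg_table():

static struct drm_gem_object *
foo_gem_prime_import_sg_table(struct drm_device *dev,
			      struct dma_buf_attachment *attach,
			      struct sg_table *sgt)
{
	struct drm_gem_object *obj;

	obj = drm_gem_dma_prime_import_sg_table(dev, attach, sgt);
	if (IS_ERR(obj))
		return obj;

	/* e.g. account the imported, contiguous buffer in driver state */
	drm_dbg_prime(dev, "imported %zu bytes\n", obj->size);

	return obj;
}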
488 * drm_gem_dma_vmap - map a DMA GEM object into the kernel's virtual
504 iosys_map_set_vaddr(map, dma_obj->vaddr); in drm_gem_dma_vmap()
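Because a DMA GEM object keeps a permanent kernel mapping, drm_gem_dma_vmap() only publishes dma_obj->vaddr through the iosys_map and always succeeds; there is no corresponding vunmap step for these objects. A short usage sketch, assuming <linux/iosys-map.h> and its iosys_map_memset() helper are available; foo_clear_buffer() is illustrative:

static int foo_clear_buffer(struct drm_gem_dma_object *dma_obj)
{
	struct iosys_map map;
	int ret;

	ret = drm_gem_dma_vmap(dma_obj, &map);
	if (ret)
		return ret;

	/* Zero the whole buffer through the persistent kernel mapping. */
	iosys_map_memset(&map, 0, 0, dma_obj->base.size);

	return 0;
}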
511 * drm_gem_dma_mmap - memory-map an exported DMA GEM object
517 * object instead of using on-demand faulting.
524 struct drm_gem_object *obj = &dma_obj->base; in drm_gem_dma_mmap()
532 vma->vm_pgoff -= drm_vma_node_start(&obj->vma_node); in drm_gem_dma_mmap()
535 if (dma_obj->map_noncoherent) { in drm_gem_dma_mmap()
536 vma->vm_page_prot = vm_get_page_prot(vma->vm_flags); in drm_gem_dma_mmap()
538 ret = dma_mmap_pages(dma_obj->base.dev->dev, in drm_gem_dma_mmap()
539 vma, vma->vm_end - vma->vm_start, in drm_gem_dma_mmap()
540 virt_to_page(dma_obj->vaddr)); in drm_gem_dma_mmap()
542 ret = dma_mmap_wc(dma_obj->base.dev->dev, vma, dma_obj->vaddr, in drm_gem_dma_mmap()
543 dma_obj->dma_addr, in drm_gem_dma_mmap()
544 vma->vm_end - vma->vm_start); in drm_gem_dma_mmap()
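drm_gem_dma_mmap() maps the whole allocation up front, via dma_mmap_pages() with vm_get_page_prot() protections for non-coherent objects or dma_mmap_wc() otherwise, so no fault handler is needed. It is normally reached through the .mmap entry of the object funcs; a driver adding its own policy could wrap it as in this sketch (the refusal of writable mappings is purely illustrative):

static int foo_gem_object_mmap(struct drm_gem_object *obj,
			       struct vm_area_struct *vma)
{
	struct drm_gem_dma_object *dma_obj = to_drm_gem_dma_obj(obj);

	/* Hypothetical policy: this driver refuses writable mappings. */
	if (vma->vm_flags & VM_WRITE)
		return -EACCES;

	return drm_gem_dma_mmap(dma_obj, vma);
}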
554 * drm_gem_dma_prime_import_sg_table_vmap - PRIME import another driver's
555 * scatter/gather table and get the virtual address of the buffer
557 * @attach: DMA-BUF attachment
558 * @sgt: Scatter/gather table of pinned pages
560 * This function imports a scatter/gather table using
570 * A pointer to a newly created GEM object or an ERR_PTR-encoded negative
583 ret = dma_buf_vmap_unlocked(attach->dmabuf, &map); in drm_gem_dma_prime_import_sg_table_vmap()
591 dma_buf_vunmap_unlocked(attach->dmabuf, &map); in drm_gem_dma_prime_import_sg_table_vmap()
596 dma_obj->vaddr = map.vaddr; in drm_gem_dma_prime_import_sg_table_vmap()
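The _vmap variant additionally vmaps the imported dma-buf, so dma_obj->vaddr is usable right after import (and is vunmapped again in drm_gem_dma_free()). Drivers that need CPU access to imported buffers select it as their gem_prime_import_sg_table callback; assuming the DRM_GEM_DMA_DRIVER_OPS_VMAP convenience macro exists alongside the ones used earlier, the wiring is just:

DEFINE_DRM_GEM_DMA_FOPS(bar_fops);

static const struct drm_driver bar_driver = {
	.driver_features = DRIVER_GEM | DRIVER_MODESET | DRIVER_ATOMIC,
	/* like DRM_GEM_DMA_DRIVER_OPS, but imports through the _vmap helper */
	DRM_GEM_DMA_DRIVER_OPS_VMAP,
	.fops  = &bar_fops,
	.name  = "bar",
	.desc  = "Hypothetical driver needing a kernel vaddr on import",
	.major = 1,
	.minor = 0,
};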
602 MODULE_DESCRIPTION("DRM DMA memory-management helpers");