Lines Matching +full:prefetch +full:- +full:dma

1 /* SPDX-License-Identifier: GPL-2.0-only OR MIT */
28 #include <linux/dma-resv.h>
43 * enum drm_gpuva_flags - flags for struct drm_gpuva
67 * struct drm_gpuva - structure to track a GPU VA mapping
127 * @rb: structure containing data to store &drm_gpuvas in a rb-tree
131 * @rb.node: the rb-tree node
139 * through the rb-tree while doing modifications on the rb-tree
145 * @rb.__subtree_last: needed by the interval tree, holding last-in-subtree
166 * drm_gpuva_invalidate() - sets whether the backing GEM of this &drm_gpuva is
174 va->flags |= DRM_GPUVA_INVALIDATED; in drm_gpuva_invalidate()
176 va->flags &= ~DRM_GPUVA_INVALIDATED; in drm_gpuva_invalidate()
180 * drm_gpuva_invalidated() - indicates whether the backing BO of this &drm_gpuva
188 return va->flags & DRM_GPUVA_INVALIDATED; in drm_gpuva_invalidated()
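
Taken together, drm_gpuva_invalidate() and drm_gpuva_invalidated() implement a
simple per-mapping staleness flag. A minimal sketch of a driver using them;
the my_gpu_* helpers are hypothetical and not part of this header:

static void my_gpu_bo_moved(struct drm_gpuva *va)
{
	/* The backing GEM object was evicted; mark the mapping stale. */
	drm_gpuva_invalidate(va, true);
}

static bool my_gpu_va_needs_rebind(struct drm_gpuva *va)
{
	/* Page tables must be re-written before the next job if set. */
	return drm_gpuva_invalidated(va);
}
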
192 * enum drm_gpuvm_flags - flags for struct drm_gpuvm
206 * objects associated with this GPUVM. Otherwise, the GEMs dma-resv is
218 * struct drm_gpuvm - DRM GPU VA Manager
260 * @rb.tree: the rb-tree to track GPU VA mappings
352 * drm_gpuvm_get() - acquire a struct drm_gpuvm reference
363 kref_get(&gpuvm->kref); in drm_gpuvm_get()
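
drm_gpuvm_get() returns the passed &drm_gpuvm, so the call composes inline;
every reference taken this way must be balanced by drm_gpuvm_put(), declared
in the same header. A sketch, where struct my_file is a hypothetical
driver-private structure:

struct my_file {
	struct drm_gpuvm *vm;	/* hypothetical per-file VM handle */
};

static void my_gpu_file_bind(struct my_file *file, struct drm_gpuvm *gpuvm)
{
	/* Hold a reference for as long as this file uses the VM. */
	file->vm = drm_gpuvm_get(gpuvm);
}

static void my_gpu_file_unbind(struct my_file *file)
{
	drm_gpuvm_put(file->vm);
}
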
377 * drm_gpuvm_resv_protected() - indicates whether &DRM_GPUVM_RESV_PROTECTED is
386 return gpuvm->flags & DRM_GPUVM_RESV_PROTECTED; in drm_gpuvm_resv_protected()
390 * drm_gpuvm_immediate_mode() - indicates whether &DRM_GPUVM_IMMEDIATE_MODE is
399 return gpuvm->flags & DRM_GPUVM_IMMEDIATE_MODE; in drm_gpuvm_immediate_mode()
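
These predicates let common code branch on how the &drm_gpuvm was
initialized. A sketch of a lockdep-style assertion; the helper name is
hypothetical, and the exact locking rule is the driver's responsibility:

static void my_gpu_assert_va_list_locked(struct drm_gpuvm *gpuvm,
					 struct drm_gem_object *obj)
{
	/* With DRM_GPUVM_RESV_PROTECTED the GEM's gpuva list is protected
	 * by its dma-resv lock instead of an internal lock. */
	if (drm_gpuvm_resv_protected(gpuvm))
		dma_resv_assert_held(obj->resv);
}
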
403 * drm_gpuvm_resv() - returns the &drm_gpuvm's &dma_resv
408 #define drm_gpuvm_resv(gpuvm__) ((gpuvm__)->r_obj->resv)
411 * drm_gpuvm_resv_obj() - returns the &drm_gem_object holding the &drm_gpuvm's
418 #define drm_gpuvm_resv_obj(gpuvm__) ((gpuvm__)->r_obj)
433 * drm_gpuvm_is_extobj() - indicates whether the given &drm_gem_object is an
445 return obj && obj->resv != drm_gpuvm_resv(gpuvm); in drm_gpuvm_is_extobj()
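
drm_gpuvm_is_extobj() pairs naturally with the drm_gpuvm_resv() macro above:
external objects carry their own &dma_resv, everything else shares the VM's
common one. A sketch picking the right lock (my_gpu_lock_va_resv is a
hypothetical helper; real code would normally go through drm_exec):

static int my_gpu_lock_va_resv(struct drm_gpuvm *gpuvm,
			       struct drm_gem_object *obj)
{
	if (drm_gpuvm_is_extobj(gpuvm, obj))
		return dma_resv_lock(obj->resv, NULL);

	return dma_resv_lock(drm_gpuvm_resv(gpuvm), NULL);
}
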
451 if (va && !list_is_last(&va->rb.entry, &va->vm->rb.list)) in __drm_gpuva_next()
458 * drm_gpuvm_for_each_va_range() - iterate over a range of &drm_gpuvas
474 for (va__ = drm_gpuva_find_first((gpuvm__), (start__), (end__) - (start__)); \
475 va__ && (va__->va.addr < (end__)); \
479 * drm_gpuvm_for_each_va_range_safe() - safely iterate over a range of
497 for (va__ = drm_gpuva_find_first((gpuvm__), (start__), (end__) - (start__)), \
499 va__ && (va__->va.addr < (end__)); \
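
The range iterators visit every &drm_gpuva overlapping [start__, end__); the
plain variant must not remove entries, the _safe variant may. A small sketch
that only reads the list (my_gpu_dump_range is hypothetical):

static void my_gpu_dump_range(struct drm_gpuvm *gpuvm, u64 start, u64 end)
{
	struct drm_gpuva *va;

	drm_gpuvm_for_each_va_range(va, gpuvm, start, end)
		pr_info("va [0x%llx, 0x%llx)\n", va->va.addr,
			va->va.addr + va->va.range);
}
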
503 * drm_gpuvm_for_each_va() - iterate over all &drm_gpuvas
511 list_for_each_entry(va__, &(gpuvm__)->rb.list, rb.entry)
514 * drm_gpuvm_for_each_va_safe() - safely iterate over all &drm_gpuvas
524 list_for_each_entry_safe(va__, next__, &(gpuvm__)->rb.list, rb.entry)
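
The _safe variant is the one to use for teardown, since the current entry may
be removed while iterating. A sketch, assuming the driver allocated its
&drm_gpuva objects with kzalloc() and never linked them to a &drm_gpuvm_bo
(otherwise drm_gpuva_unlink() would be needed first):

static void my_gpu_vm_teardown(struct drm_gpuvm *gpuvm)
{
	struct drm_gpuva *va, *next;

	drm_gpuvm_for_each_va_safe(va, next, gpuvm) {
		drm_gpuva_remove(va);	/* unlink from the interval tree */
		kfree(va);
	}
}
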
527 * struct drm_gpuvm_exec - &drm_gpuvm abstraction of &drm_exec
545 * @vm: the &drm_gpuvm whose DMA reservations are to be locked
596 * drm_gpuvm_exec_unlock() - unlock all dma-resv of all associated BOs
599 * Releases all dma-resv locks of all &drm_gem_objects previously acquired
607 drm_exec_fini(&vm_exec->exec); in drm_gpuvm_exec_unlock()
618 * drm_gpuvm_exec_resv_add_fence() - add fence to private and all extobj
621 * @private_usage: private dma-resv usage
622 * @extobj_usage: extobj dma-resv usage
632 drm_gpuvm_resv_add_fence(vm_exec->vm, &vm_exec->exec, fence, in drm_gpuvm_exec_resv_add_fence()
637 * drm_gpuvm_exec_validate() - validate all BOs marked as evicted
647 return drm_gpuvm_validate(vm_exec->vm, &vm_exec->exec); in drm_gpuvm_exec_validate()
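
Together with drm_gpuvm_exec_lock() from the same header, these helpers form
the usual submission pattern: lock all dma-resv, validate evicted BOs, queue
the job, attach its fence, unlock. A hedged sketch; the fence usages and the
job-queuing step are driver policy, not mandated by this API:

static int my_gpu_submit(struct drm_gpuvm *gpuvm, struct dma_fence *fence)
{
	struct drm_gpuvm_exec vm_exec = {
		.vm = gpuvm,
		.flags = DRM_EXEC_INTERRUPTIBLE_WAIT,
	};
	int ret;

	ret = drm_gpuvm_exec_lock(&vm_exec);
	if (ret)
		return ret;

	ret = drm_gpuvm_exec_validate(&vm_exec);
	if (ret)
		goto out_unlock;

	/* ...queue the job producing @fence here... */

	drm_gpuvm_exec_resv_add_fence(&vm_exec, fence,
				      DMA_RESV_USAGE_BOOKKEEP,
				      DMA_RESV_USAGE_BOOKKEEP);
out_unlock:
	drm_gpuvm_exec_unlock(&vm_exec);
	return ret;
}
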
651 * struct drm_gpuvm_bo - structure representing a &drm_gpuvm and
659 * Furthermore, it is used to cache evicted GEM objects for a certain GPU-VM to
663 * a GEM object is mapped first in a GPU-VM and release the instance once the
664 * last mapping of the GEM object in this GPU-VM is unmapped.
681 * protected by the &drm_gem_object's dma-resv lock.
745 * drm_gpuvm_bo_get() - acquire a struct drm_gpuvm_bo reference
756 kref_get(&vm_bo->kref); in drm_gpuvm_bo_get()
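
As with the &drm_gpuvm itself, the combined structure is reference counted;
drm_gpuvm_bo_get() returns the passed &drm_gpuvm_bo and must be balanced by
drm_gpuvm_bo_put() from the same header. A sketch of keeping a vm_bo alive
across an asynchronous section (the section itself is hypothetical):

static void my_gpu_async_user(struct drm_gpuvm_bo *vm_bo)
{
	/* Pin the GEM/VM combination while the async user runs. */
	drm_gpuvm_bo_get(vm_bo);

	/* ...asynchronous section... */

	drm_gpuvm_bo_put(vm_bo);	/* balance the get above */
}
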
772 * drm_gpuvm_bo_gem_evict() - add/remove all &drm_gpuvm_bo's in the list
785 drm_gem_gpuva_assert_lock_held(vm_bo->vm, obj); in drm_gpuvm_bo_gem_evict()
793 * drm_gpuvm_bo_for_each_va() - iterator to walk over a list of &drm_gpuva
803 list_for_each_entry(va__, &(vm_bo)->list.gpuva, gem.entry)
806 * drm_gpuvm_bo_for_each_va_safe() - iterator to safely walk over a list of
819 list_for_each_entry_safe(va__, next__, &(vm_bo)->list.gpuva, gem.entry)
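
These pieces compose: drm_gpuvm_bo_gem_evict(obj, true) walks all
&drm_gpuvm_bo instances of a GEM object, and drm_gpuvm_bo_for_each_va() then
reaches the individual mappings. A sketch flagging every mapping of an
evicted BO as stale, tying back to drm_gpuva_invalidate() above:

static void my_gpu_vm_bo_evicted(struct drm_gpuvm_bo *vm_bo)
{
	struct drm_gpuva *va;

	drm_gpuvm_bo_for_each_va(va, vm_bo)
		drm_gpuva_invalidate(va, true);
}
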
822 * enum drm_gpuva_op_type - GPU VA operation type
843 * @DRM_GPUVA_OP_PREFETCH: the prefetch op type
854 * struct drm_gpuva_op_map - GPU VA map operation
893 * struct drm_gpuva_op_unmap - GPU VA unmap operation
918 * struct drm_gpuva_op_remap - GPU VA remap operation
958 * struct drm_gpuva_op_prefetch - GPU VA prefetch operation
960 * This structure represents a single prefetch operation generated by the
965 * @va: the &drm_gpuva to prefetch
971 * struct drm_gpuva_op - GPU VA operation
1008 * @prefetch: the prefetch operation
1010 struct drm_gpuva_op_prefetch prefetch; member
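
Since &drm_gpuva_op carries its payload in a union keyed by op->op, drivers
typically dispatch with a switch. A sketch; the my_gpu_op_* handlers are
hypothetical:

static int my_gpu_process_op(struct drm_gpuva_op *op)
{
	switch (op->op) {
	case DRM_GPUVA_OP_MAP:
		return my_gpu_op_map(&op->map);
	case DRM_GPUVA_OP_REMAP:
		return my_gpu_op_remap(&op->remap);
	case DRM_GPUVA_OP_UNMAP:
		return my_gpu_op_unmap(&op->unmap);
	case DRM_GPUVA_OP_PREFETCH:
		return my_gpu_op_prefetch(&op->prefetch);
	default:
		return -EINVAL;
	}
}
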
1015 * struct drm_gpuva_ops - wraps a list of &drm_gpuva_op
1025 * drm_gpuva_for_each_op() - iterator to walk over &drm_gpuva_ops
1031 #define drm_gpuva_for_each_op(op, ops) list_for_each_entry(op, &(ops)->list, entry)
1034 * drm_gpuva_for_each_op_safe() - iterator to safely walk over &drm_gpuva_ops
1043 list_for_each_entry_safe(op, next, &(ops)->list, entry)
1046 * drm_gpuva_for_each_op_from_reverse() - iterate backwards from the given point
1054 list_for_each_entry_from_reverse(op, &(ops)->list, entry)
1057 * drm_gpuva_for_each_op_reverse() - iterator to walk over &drm_gpuva_ops in reverse
1064 list_for_each_entry_reverse(op, &(ops)->list, entry)
1067 * drm_gpuva_first_op() - returns the first &drm_gpuva_op from &drm_gpuva_ops
1071 list_first_entry(&(ops)->list, struct drm_gpuva_op, entry)
1074 * drm_gpuva_last_op() - returns the last &drm_gpuva_op from &drm_gpuva_ops
1078 list_last_entry(&(ops)->list, struct drm_gpuva_op, entry)
1081 * drm_gpuva_prev_op() - previous &drm_gpuva_op in the list
1087 * drm_gpuva_next_op() - next &drm_gpuva_op in the list
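
The _from_reverse variant exists chiefly for error unwinding: when a forward
walk fails, step back one op with drm_gpuva_prev_op() and revert what was
already applied, newest first. A sketch building on the dispatch function
above (my_gpu_revert_op is hypothetical):

static int my_gpu_apply_ops(struct drm_gpuva_ops *ops)
{
	struct drm_gpuva_op *op;
	int ret;

	drm_gpuva_for_each_op(op, ops) {
		ret = my_gpu_process_op(op);
		if (ret)
			goto unwind;
	}
	return 0;

unwind:
	/* The failing op was not applied; start with its predecessor. */
	op = drm_gpuva_prev_op(op);
	drm_gpuva_for_each_op_from_reverse(op, ops)
		my_gpu_revert_op(op);
	return ret;
}
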
1093 * struct drm_gpuvm_map_req - arguments passed to drm_gpuvm_sm_map[_ops_create]()
1126 va->va.addr = op->va.addr; in drm_gpuva_init_from_op()
1127 va->va.range = op->va.range; in drm_gpuva_init_from_op()
1128 va->gem.obj = op->gem.obj; in drm_gpuva_init_from_op()
1129 va->gem.offset = op->gem.offset; in drm_gpuva_init_from_op()
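
drm_gpuva_init_from_op() is the usual first step in a driver's
&drm_gpuvm_ops sm_step_map callback: copy the map op into a freshly
allocated &drm_gpuva, then insert it. A hedged sketch, assuming the driver
passed the &drm_gpuvm as the priv argument:

static int my_gpu_sm_step_map(struct drm_gpuva_op *op, void *priv)
{
	struct drm_gpuvm *gpuvm = priv;
	struct drm_gpuva *va;
	int ret;

	va = kzalloc(sizeof(*va), GFP_KERNEL);
	if (!va)
		return -ENOMEM;

	/* Copy addr/range/obj/offset from the map op into the new va. */
	drm_gpuva_init_from_op(va, &op->map);
	ret = drm_gpuva_insert(gpuvm, va);
	if (ret)
		kfree(va);
	return ret;
}
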
1133 * struct drm_gpuvm_ops - callbacks for split/merge steps
1277 * drm_gpuva_op_remap_to_unmap_range() - Helper to get the start and range of
1285 * re-mapped, but is now empty.
1291 const u64 va_start = op->prev ? in drm_gpuva_op_remap_to_unmap_range()
1292 op->prev->va.addr + op->prev->va.range : in drm_gpuva_op_remap_to_unmap_range()
1293 op->unmap->va->va.addr; in drm_gpuva_op_remap_to_unmap_range()
1294 const u64 va_end = op->next ? in drm_gpuva_op_remap_to_unmap_range()
1295 op->next->va.addr : in drm_gpuva_op_remap_to_unmap_range()
1296 op->unmap->va->va.addr + op->unmap->va->va.range; in drm_gpuva_op_remap_to_unmap_range()
1301 *range = va_end - va_start; in drm_gpuva_op_remap_to_unmap_range()
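
In other words, the helper computes exactly the hole a remap punches into the
old mapping, so only that region needs to be torn down from the page tables.
A sketch (my_gpu_unmap_pages is hypothetical):

static void my_gpu_handle_remap_unmap(struct drm_gpuva_op_remap *op)
{
	u64 start, range;

	drm_gpuva_op_remap_to_unmap_range(op, &start, &range);
	my_gpu_unmap_pages(start, range);
}
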