Matching lines from drivers/gpu/drm/xe/xe_vm.h (Xe GPU driver VM header)

/* SPDX-License-Identifier: MIT */
drm_gpuvm_get(&vm->gpuvm);                              /* in xe_vm_get() */
drm_gpuvm_put(&vm->gpuvm);                              /* in xe_vm_put() */
/* Only guaranteed not to change when vm->lock is held */
return !vm->size;                                       /* in xe_vm_is_closed() */
return vm->flags & XE_VM_FLAG_BANNED;                   /* in xe_vm_is_banned() */
lockdep_assert_held(&vm->lock);                         /* in xe_vm_is_closed_or_banned() */
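
/*
 * Editor's sketch (not from the header): how the reference-counting and
 * state helpers above combine in a caller. The function name and error
 * code are hypothetical; vm->lock is a rwsem in this driver, taken here
 * in read mode so xe_vm_is_closed_or_banned()'s lockdep assert holds.
 */
static int example_check_vm_usable(struct xe_vm *vm)
{
	int err = 0;

	xe_vm_get(vm);                  /* hold a reference across the check */
	down_read(&vm->lock);
	if (xe_vm_is_closed_or_banned(vm))
		err = -ENOENT;
	up_read(&vm->lock);
	xe_vm_put(vm);
	return err;
}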
 * xe_vm_has_scratch() - Whether the vm is configured for scratch PTEs
return vm->flags & XE_VM_FLAG_SCRATCH_PAGE;             /* in xe_vm_has_scratch() */
 * gpuvm_to_vm() - Return the embedding xe_vm from a struct drm_gpuvm pointer
return gpuvm_to_vm(gpuva->vm);                          /* in gpuva_to_vm() */
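
/*
 * Editor's sketch (not from the header): the conversion helpers let GPUVM
 * core objects be mapped back to the driver VM; the caller below is
 * hypothetical.
 */
static bool example_gpuva_wants_scratch(struct drm_gpuva *gpuva)
{
	struct xe_vm *vm = gpuva_to_vm(gpuva);

	return xe_vm_has_scratch(vm);   /* e.g. decide whether to write scratch PTEs */
}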
return vma->gpuva.va.addr;                              /* in xe_vma_start() */
return vma->gpuva.va.range;                             /* in xe_vma_size() */
return vma->gpuva.gem.offset;                           /* in xe_vma_bo_offset() */
return !vma->gpuva.gem.obj ? NULL :                     /* in xe_vma_bo() */
	container_of(vma->gpuva.gem.obj, struct xe_bo, ttm.base);
return container_of(vma->gpuva.vm, struct xe_vm, gpuvm); /* in xe_vma_vm() */
return vma->gpuva.flags & XE_VMA_READ_ONLY;             /* in xe_vma_read_only() */
return vma->gpuva.gem.offset;                           /* in xe_vma_userptr() */
return vma->gpuva.flags & DRM_GPUVA_SPARSE;             /* in xe_vma_is_null() */
return vma->gpuva.flags & XE_VMA_SYSTEM_ALLOCATOR;      /* in xe_vma_is_cpu_addr_mirror() */
 * to_userptr_vma() - Return a pointer to an embedding userptr vma
xe_assert(xe_vma_vm(vma)->xe, xe_vma_is_userptr(vma));  /* in to_userptr_vma() */
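
/*
 * Editor's sketch (not from the header): exercising the xe_vma accessors
 * above. The function name is hypothetical; drm_printf() comes from
 * <drm/drm_print.h>.
 */
static void example_describe_vma(struct xe_vma *vma, struct drm_printer *p)
{
	drm_printf(p, "VA 0x%016llx size 0x%llx%s %s\n",
		   xe_vma_start(vma), xe_vma_size(vma),
		   xe_vma_read_only(vma) ? " (ro)" : "",
		   xe_vma_is_null(vma) ? "null/sparse" :
		   xe_vma_bo(vma) ? "bo-backed" : "userptr or cpu-addr mirror");
}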
u64 xe_vm_pdp4_descriptor(struct xe_vm *vm, struct xe_tile *tile);
return vm->flags & XE_VM_FLAG_FAULT_MODE;               /* in xe_vm_in_fault_mode() */
return vm->flags & XE_VM_FLAG_LR_MODE;                  /* in xe_vm_in_lr_mode() */
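
/*
 * Editor's note: elsewhere in this header, preempt-fence mode (asserted by
 * xe_vm_queue_rebind_worker() below) is derived from these two flags,
 * approximately as:
 */
static inline bool xe_vm_in_preempt_fence_mode(struct xe_vm *vm)
{
	/* a long-running VM that does not rely on GPU page faults */
	return xe_vm_in_lr_mode(vm) && !xe_vm_in_fault_mode(vm);
}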
xe_assert(vm->xe, xe_vm_in_preempt_fence_mode(vm));     /* in xe_vm_queue_rebind_worker() */
queue_work(vm->xe->ordered_wq, &vm->preempt.rebind_work);
 * xe_vm_reactivate_rebind() - Reactivate the rebind functionality on compute
if (xe_vm_in_preempt_fence_mode(vm) && vm->preempt.rebind_deactivated) { /* in xe_vm_reactivate_rebind() */
	vm->preempt.rebind_deactivated = false;
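
/*
 * Editor's sketch (not from the header): a caller kicking the rebind
 * worker, e.g. after preempt fences signal. The guard mirrors the assert
 * inside xe_vm_queue_rebind_worker() above; the function name is
 * hypothetical.
 */
static void example_kick_rebind(struct xe_vm *vm)
{
	if (xe_vm_in_preempt_fence_mode(vm))
		xe_vm_queue_rebind_worker(vm);
}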
 * xe_vm_resv() - Return the vm's reservation object
return drm_gpuvm_resv(&vm->gpuvm);                      /* in xe_vm_resv() */
 * xe_vm_assert_held(vm) - Assert that the vm's reservation object is held.
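
/*
 * Editor's sketch (not from the header): taking the vm's reservation lock
 * directly so that xe_vm_assert_held() is satisfied; real paths often lock
 * it through drm_exec instead. Function name hypothetical.
 */
static void example_with_vm_resv(struct xe_vm *vm)
{
	if (dma_resv_lock(xe_vm_resv(vm), NULL))
		return;
	xe_vm_assert_held(vm);  /* dma_resv_assert_held() on xe_vm_resv(vm) */
	/* ... touch state protected by the vm's dma-resv ... */
	dma_resv_unlock(xe_vm_resv(vm));
}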
void xe_vm_snapshot_print(struct xe_vm_snapshot *snap, struct drm_printer *p);
 * xe_vm_set_validating() - Register this task as currently making bos resident
WRITE_ONCE(vm->validation.validating, current);         /* in xe_vm_set_validating() */
 * xe_vm_clear_validating() - Unregister this task as currently making bos resident
WRITE_ONCE(vm->validation.validating, NULL);            /* in xe_vm_clear_validating() */
 * xe_vm_is_validating() - Whether bos bound to the vm are currently being made resident
if (READ_ONCE(vm->validation.validating) == current) {  /* in xe_vm_is_validating() */
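
/*
 * Editor's sketch (not from the header): the validating-task pattern these
 * helpers imply. Parameter lists here are simplified and may differ from
 * the full header. A concurrent path can use xe_vm_is_validating() to tell
 * whether the current task is the one making bos resident.
 */
static void example_validate_bos(struct xe_vm *vm)
{
	xe_vm_set_validating(vm);       /* current task now owns validation */
	/* ... make bos resident / validate ... */
	xe_vm_clear_validating(vm);
}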
 * xe_vm_set_validation_exec() - Accessor to set the drm_exec object
xe_assert(vm->xe, !!exec ^ !!vm->validation._exec);     /* in xe_vm_set_validation_exec() */
vm->validation._exec = exec;
 * xe_vm_validation_exec() - Accessor to read the drm_exec object
return vm->validation._exec;                            /* in xe_vm_validation_exec() */
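
/*
 * Editor's sketch (not from the header): the xor assert above means set and
 * reset must alternate, i.e. the exec pointer is installed for the span of
 * a drm_exec transaction and cleared afterwards. Locking details elided;
 * the function name is hypothetical.
 */
static void example_validation_transaction(struct xe_vm *vm)
{
	struct drm_exec exec;

	drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);
	xe_vm_set_validation_exec(vm, &exec);
	/* ... lock objects and validate under @exec ... */
	xe_vm_set_validation_exec(vm, NULL);
	drm_exec_fini(&exec);
}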
 * xe_vm_has_valid_gpu_mapping() - Advisory helper to check if VMA or SVM range has
 * a valid GPU mapping
 * @tile: The tile which the GPU mapping belongs to
 * @tile_present: Tile present mask
 * @tile_invalidated: Tile invalidated mask
 * the BO dma-resv lock in the BO case. As such, they should only be used in
#define xe_vm_has_valid_gpu_mapping(tile, tile_present, tile_invalidated)       \
	((READ_ONCE(tile_present) & ~READ_ONCE(tile_invalidated)) & BIT((tile)->id))
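
/*
 * Editor's sketch (not from the header): advisory use of the macro in a
 * per-tile path. The tile_present/tile_invalidated members on the vma are
 * assumed from context; per the doc above, treat the result only as a hint
 * unless the relevant locks are held.
 */
static bool example_vma_mapped_on_tile(struct xe_vma *vma, struct xe_tile *tile)
{
	return xe_vm_has_valid_gpu_mapping(tile, vma->tile_present,
					   vma->tile_invalidated);
}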