Lines matching refs: fence (each entry: file line number, source line, containing function)
155 struct dma_fence *fence; in amdgpu_userq_fence_driver_process() local
167 fence = &userq_fence->base; in amdgpu_userq_fence_driver_process()
169 if (rptr < fence->seqno) in amdgpu_userq_fence_driver_process()
172 dma_fence_signal(fence); in amdgpu_userq_fence_driver_process()
178 dma_fence_put(fence); in amdgpu_userq_fence_driver_process()
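The references at 155-178 sketch the fence-driver processing path: each pending amdgpu_userq_fence is compared against the hardware read pointer and, once the hardware has caught up to its seqno, signaled and dropped. A minimal sketch of that loop shape (kernel headers omitted throughout these sketches), assuming the pending fences live on fence_drv->fences as the destroy path below suggests, and that rptr has already been read from the fence driver; the locking and the list_del() are assumptions, only the compare/signal/put steps appear in the listing:

/* Sketch only: inferred from the references above, not the actual driver code. */
static void userq_fence_process_sketch(struct amdgpu_userq_fence_driver *fence_drv,
				       u64 rptr)
{
	struct amdgpu_userq_fence *userq_fence, *tmp;
	struct dma_fence *fence;

	list_for_each_entry_safe(userq_fence, tmp, &fence_drv->fences, link) {
		fence = &userq_fence->base;

		/* Entries are assumed to be in seqno order, so the first
		 * fence the hardware has not reached ends the walk. */
		if (rptr < fence->seqno)
			break;

		dma_fence_signal(fence);
		list_del(&userq_fence->link);	/* assumed: unlink once signaled */
		dma_fence_put(fence);		/* drop the pending-list reference */
	}
}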
190 struct amdgpu_userq_fence *fence, *tmp; in amdgpu_userq_fence_driver_destroy() local
196 list_for_each_entry_safe(fence, tmp, &fence_drv->fences, link) { in amdgpu_userq_fence_driver_destroy()
197 f = &fence->base; in amdgpu_userq_fence_driver_destroy()
204 list_del(&fence->link); in amdgpu_userq_fence_driver_destroy()
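The destroy path at 190-204 walks the same list with list_for_each_entry_safe() and unlinks whatever is still pending when the fence driver goes away. A sketch of that teardown walk; marking each leftover fence with an error and signaling it before the final put is an assumption, the listing only shows the walk and the list_del():

static void userq_fence_drv_destroy_sketch(struct amdgpu_userq_fence_driver *fence_drv)
{
	struct amdgpu_userq_fence *fence, *tmp;
	struct dma_fence *f;

	list_for_each_entry_safe(fence, tmp, &fence_drv->fences, link) {
		f = &fence->base;
		dma_fence_set_error(f, -ECANCELED);	/* assumed */
		dma_fence_signal(f);			/* assumed */
		list_del(&fence->link);
		dma_fence_put(f);			/* assumed: drop the list reference */
	}
}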
241 struct dma_fence *fence; in amdgpu_userq_fence_create() local
250 fence = &userq_fence->base; in amdgpu_userq_fence_create()
253 dma_fence_init64(fence, &amdgpu_userq_fence_ops, &userq_fence->lock, in amdgpu_userq_fence_create()
257 dma_fence_get(fence); in amdgpu_userq_fence_create()
289 if (!dma_fence_is_signaled(fence)) in amdgpu_userq_fence_create()
292 dma_fence_put(fence); in amdgpu_userq_fence_create()
296 *f = fence; in amdgpu_userq_fence_create()
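amdgpu_userq_fence_create() (241-296) initializes the embedded dma_fence with dma_fence_init64() against amdgpu_userq_fence_ops, takes an extra reference for the pending list, keeps the fence on that list only while it is not already signaled, and hands the caller the initialized fence through *f. A rough sketch of that shape; the real function takes the user queue (see line 567) rather than the fence driver, and the context value, list handling, and locking here are assumptions:

static int userq_fence_create_sketch(struct amdgpu_userq_fence_driver *fence_drv,
				     struct amdgpu_userq_fence *userq_fence,
				     u64 wptr, struct dma_fence **f)
{
	struct dma_fence *fence = &userq_fence->base;

	userq_fence->fence_drv = fence_drv;
	spin_lock_init(&userq_fence->lock);
	dma_fence_init64(fence, &amdgpu_userq_fence_ops, &userq_fence->lock,
			 fence_drv->context, wptr);	/* context field assumed */

	dma_fence_get(fence);		/* extra reference for the pending list */
	if (!dma_fence_is_signaled(fence))
		list_add_tail(&userq_fence->link, &fence_drv->fences);	/* assumed */
	else
		dma_fence_put(fence);	/* already done, drop the extra reference */

	*f = fence;
	return 0;
}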
308 struct amdgpu_userq_fence *fence = to_amdgpu_userq_fence(f); in amdgpu_userq_fence_get_timeline_name() local
310 return fence->fence_drv->timeline_name; in amdgpu_userq_fence_get_timeline_name()
315 struct amdgpu_userq_fence *fence = to_amdgpu_userq_fence(f); in amdgpu_userq_fence_signaled() local
316 struct amdgpu_userq_fence_driver *fence_drv = fence->fence_drv; in amdgpu_userq_fence_signaled()
320 wptr = fence->base.seqno; in amdgpu_userq_fence_signaled()
330 struct dma_fence *fence = container_of(rcu, struct dma_fence, rcu); in amdgpu_userq_fence_free() local
331 struct amdgpu_userq_fence *userq_fence = to_amdgpu_userq_fence(fence); in amdgpu_userq_fence_free()
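Lines 308-331 are the dma_fence callbacks: get_timeline_name returns the per-driver timeline string, signaled compares the fence's seqno (the write pointer it was created with) against the current read pointer, and the RCU free callback recovers the containing amdgpu_userq_fence from the embedded dma_fence. A sketch of how these typically hang together; the source of rptr, the kfree(), and the wiring of the free callback through .release/call_rcu() are assumptions:

static const char *userq_fence_get_timeline_name_sketch(struct dma_fence *f)
{
	struct amdgpu_userq_fence *fence = to_amdgpu_userq_fence(f);

	return fence->fence_drv->timeline_name;
}

static bool userq_fence_signaled_sketch(struct dma_fence *f)
{
	struct amdgpu_userq_fence *fence = to_amdgpu_userq_fence(f);
	struct amdgpu_userq_fence_driver *fence_drv = fence->fence_drv;
	u64 wptr = fence->base.seqno;
	u64 rptr = READ_ONCE(*fence_drv->cpu_addr);	/* rptr location assumed */

	return rptr >= wptr;
}

static void userq_fence_free_sketch(struct rcu_head *rcu)
{
	struct dma_fence *fence = container_of(rcu, struct dma_fence, rcu);
	struct amdgpu_userq_fence *userq_fence = to_amdgpu_userq_fence(fence);

	kfree(userq_fence);	/* allocation scheme assumed */
}

static const struct dma_fence_ops userq_fence_ops_sketch = {
	/* .get_driver_name omitted in this sketch */
	.get_timeline_name = userq_fence_get_timeline_name_sketch,
	.signaled = userq_fence_signaled_sketch,
	/* .release would typically queue userq_fence_free_sketch via call_rcu() (assumed) */
};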
419 static void amdgpu_userq_fence_cleanup(struct dma_fence *fence) in amdgpu_userq_fence_cleanup() argument
421 dma_fence_put(fence); in amdgpu_userq_fence_cleanup()
425 amdgpu_userq_fence_driver_set_error(struct amdgpu_userq_fence *fence, in amdgpu_userq_fence_driver_set_error() argument
428 struct amdgpu_userq_fence_driver *fence_drv = fence->fence_drv; in amdgpu_userq_fence_driver_set_error()
434 f = rcu_dereference_protected(&fence->base, in amdgpu_userq_fence_driver_set_error()
447 struct amdgpu_userq_fence *fence = to_amdgpu_userq_fence(f); in amdgpu_userq_fence_driver_force_completion() local
448 struct amdgpu_userq_fence_driver *fence_drv = fence->fence_drv; in amdgpu_userq_fence_driver_force_completion()
449 u64 wptr = fence->base.seqno; in amdgpu_userq_fence_driver_force_completion()
451 amdgpu_userq_fence_driver_set_error(fence, -ECANCELED); in amdgpu_userq_fence_driver_force_completion()
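The error and teardown helpers at 419-451: amdgpu_userq_fence_cleanup() is a plain dma_fence_put() used on the signal ioctl's error paths, amdgpu_userq_fence_driver_set_error() records an error on the fence via its fence driver, and amdgpu_userq_fence_driver_force_completion() marks the fence -ECANCELED and completes it as if the hardware had reached its write pointer. A sketch; the lock around dma_fence_set_error() and the way the read pointer is advanced before reprocessing are assumptions:

static void userq_fence_cleanup_sketch(struct dma_fence *fence)
{
	dma_fence_put(fence);
}

static void userq_fence_set_error_sketch(struct amdgpu_userq_fence *fence, int error)
{
	struct amdgpu_userq_fence_driver *fence_drv = fence->fence_drv;
	unsigned long flags;

	spin_lock_irqsave(&fence_drv->fence_list_lock, flags);	/* lock name assumed */
	dma_fence_set_error(&fence->base, error);
	spin_unlock_irqrestore(&fence_drv->fence_list_lock, flags);
}

static void userq_fence_force_completion_sketch(struct dma_fence *f)
{
	struct amdgpu_userq_fence *fence = to_amdgpu_userq_fence(f);
	struct amdgpu_userq_fence_driver *fence_drv = fence->fence_drv;
	u64 wptr = fence->base.seqno;

	userq_fence_set_error_sketch(fence, -ECANCELED);

	/* Assumed mechanism: pretend the hardware consumed up to wptr and
	 * let the normal processing path signal the fence. */
	WRITE_ONCE(*fence_drv->cpu_addr, wptr);		/* rptr location assumed */
	amdgpu_userq_fence_driver_process(fence_drv);	/* signature assumed */
}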
474 struct dma_fence *fence; in amdgpu_userq_signal_ioctl() local
567 r = amdgpu_userq_fence_create(queue, userq_fence, wptr, &fence); in amdgpu_userq_signal_ioctl()
575 queue->last_fence = dma_fence_get(fence); in amdgpu_userq_signal_ioctl()
587 amdgpu_userq_fence_cleanup(fence); in amdgpu_userq_signal_ioctl()
594 amdgpu_userq_fence_cleanup(fence); in amdgpu_userq_signal_ioctl()
603 dma_resv_add_fence(gobj_read[i]->resv, fence, in amdgpu_userq_signal_ioctl()
611 dma_resv_add_fence(gobj_write[i]->resv, fence, in amdgpu_userq_signal_ioctl()
617 drm_syncobj_replace_fence(syncobj[i], fence); in amdgpu_userq_signal_ioctl()
620 dma_fence_put(fence); in amdgpu_userq_signal_ioctl()
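In amdgpu_userq_signal_ioctl() (474-620), the newly created fence is cached as queue->last_fence, attached to the reservation objects of the read and write GEM objects, installed into the requested syncobjs, and the ioctl's own reference is then dropped; the two amdgpu_userq_fence_cleanup() calls at 587/594 cover error paths after the fence exists. A compressed sketch of the publication step; the queue type name, the count parameters, the exact dma_resv usage flags, and the reservation locking are assumptions:

static int userq_signal_publish_sketch(struct amdgpu_usermode_queue *queue,
				       struct amdgpu_userq_fence *userq_fence, u64 wptr,
				       struct drm_gem_object **gobj_read, u32 num_read,
				       struct drm_gem_object **gobj_write, u32 num_write,
				       struct drm_syncobj **syncobj, u32 num_syncobj)
{
	struct dma_fence *fence;
	int r;
	u32 i;

	r = amdgpu_userq_fence_create(queue, userq_fence, wptr, &fence);
	if (r)
		return r;

	/* Cache the newest fence on the queue; dropping the previously
	 * cached fence first is assumed, the listing only shows the get. */
	queue->last_fence = dma_fence_get(fence);

	for (i = 0; i < num_read; i++)
		dma_resv_add_fence(gobj_read[i]->resv, fence, DMA_RESV_USAGE_READ);
	for (i = 0; i < num_write; i++)
		dma_resv_add_fence(gobj_write[i]->resv, fence, DMA_RESV_USAGE_WRITE);
	for (i = 0; i < num_syncobj; i++)
		drm_syncobj_replace_fence(syncobj[i], fence);

	dma_fence_put(fence);	/* drop the ioctl's creation reference */
	return 0;
}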
755 struct dma_fence *fence; in amdgpu_userq_wait_ioctl() local
762 &fence); in amdgpu_userq_wait_ioctl()
766 dma_fence_unwrap_for_each(f, &iter, fence) in amdgpu_userq_wait_ioctl()
769 dma_fence_put(fence); in amdgpu_userq_wait_ioctl()
775 struct dma_fence *fence; in amdgpu_userq_wait_ioctl() local
780 &fence); in amdgpu_userq_wait_ioctl()
785 dma_fence_put(fence); in amdgpu_userq_wait_ioctl()
791 struct dma_fence *fence; in amdgpu_userq_wait_ioctl() local
794 DMA_RESV_USAGE_READ, fence) in amdgpu_userq_wait_ioctl()
800 struct dma_fence *fence; in amdgpu_userq_wait_ioctl() local
803 DMA_RESV_USAGE_WRITE, fence) in amdgpu_userq_wait_ioctl()
832 struct dma_fence *fence; in amdgpu_userq_wait_ioctl() local
835 DMA_RESV_USAGE_READ, fence) { in amdgpu_userq_wait_ioctl()
841 fences[num_fences++] = fence; in amdgpu_userq_wait_ioctl()
842 dma_fence_get(fence); in amdgpu_userq_wait_ioctl()
849 struct dma_fence *fence; in amdgpu_userq_wait_ioctl() local
852 DMA_RESV_USAGE_WRITE, fence) { in amdgpu_userq_wait_ioctl()
858 fences[num_fences++] = fence; in amdgpu_userq_wait_ioctl()
859 dma_fence_get(fence); in amdgpu_userq_wait_ioctl()
865 struct dma_fence *fence; in amdgpu_userq_wait_ioctl() local
872 &fence); in amdgpu_userq_wait_ioctl()
876 dma_fence_unwrap_for_each(f, &iter, fence) { in amdgpu_userq_wait_ioctl()
886 dma_fence_put(fence); in amdgpu_userq_wait_ioctl()
892 struct dma_fence *fence; in amdgpu_userq_wait_ioctl() local
897 &fence); in amdgpu_userq_wait_ioctl()
906 fences[num_fences++] = fence; in amdgpu_userq_wait_ioctl()
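amdgpu_userq_wait_ioctl() (755-906) makes two passes over the same sources: a counting pass that looks up each syncobj fence, flattens chains and arrays with dma_fence_unwrap_for_each(), walks each BO's reservation object with dma_resv_for_each_fence(), and drops every reference again, followed by a collection pass that walks the same sources and stores a referenced pointer into a fences[] array sized from that count. Two small sketches of those patterns; the syncobj lookup, the filtering done inside the real loops (e.g. skipping the queue's own fences), and the overflow error code are assumptions:

/* Counting pass over one already-looked-up syncobj fence: unwrap chains
 * and arrays so every leaf fence is counted, then drop the lookup ref. */
static u32 userq_wait_count_unwrapped_sketch(struct dma_fence *fence)
{
	struct dma_fence_unwrap iter;
	struct dma_fence *f;
	u32 count = 0;

	dma_fence_unwrap_for_each(f, &iter, fence)
		count++;

	dma_fence_put(fence);
	return count;
}

/* Collection pass over one BO's reservation object: take a reference on
 * each fence and append it to the preallocated array. */
static int userq_wait_collect_resv_sketch(struct dma_resv *resv,
					  struct dma_fence **fences,
					  u32 *num_fences, u32 max_fences)
{
	struct dma_resv_iter cursor;
	struct dma_fence *fence;

	dma_resv_for_each_fence(&cursor, resv, DMA_RESV_USAGE_READ, fence) {
		if (*num_fences >= max_fences)
			return -E2BIG;		/* error code assumed */

		fences[(*num_fences)++] = fence;
		dma_fence_get(fence);
	}

	return 0;
}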