Home
last modified time | relevance | path

Searched refs: s_fence (Results 1 – 24 of 24) sorted by relevance

/linux/drivers/gpu/drm/scheduler/
H A Dsched_main.c370 struct drm_sched_fence *s_fence = s_job->s_fence; in drm_sched_job_done() local
371 struct drm_gpu_scheduler *sched = s_fence->sched; in drm_sched_job_done()
376 trace_drm_sched_job_done(s_fence); in drm_sched_job_done()
378 dma_fence_get(&s_fence->finished); in drm_sched_job_done()
379 drm_sched_fence_finished(s_fence, result); in drm_sched_job_done()
380 dma_fence_put(&s_fence->finished); in drm_sched_job_done()
629 if (s_job->s_fence->parent && in drm_sched_stop()
630 dma_fence_remove_callback(s_job->s_fence->parent, in drm_sched_stop()
632 dma_fence_put(s_job->s_fence->parent); in drm_sched_stop()
633 s_job->s_fence->parent = NULL; in drm_sched_stop()
[all …]
H A Dsched_entity.c187 struct drm_sched_fence *s_fence = to_drm_sched_fence(f); in drm_sched_entity_kill_jobs_work() local
189 if (s_fence && f == &s_fence->scheduled) { in drm_sched_entity_kill_jobs_work()
195 f = dma_fence_get_rcu(&s_fence->finished); in drm_sched_entity_kill_jobs_work()
201 dma_fence_put(&s_fence->scheduled); in drm_sched_entity_kill_jobs_work()
212 drm_sched_fence_scheduled(job->s_fence, NULL); in drm_sched_entity_kill_jobs_work()
213 drm_sched_fence_finished(job->s_fence, -ESRCH); in drm_sched_entity_kill_jobs_work()
214 WARN_ON(job->s_fence->parent); in drm_sched_entity_kill_jobs_work()
252 struct drm_sched_fence *s_fence = job->s_fence; in drm_sched_entity_kill() local
254 dma_fence_get(&s_fence->finished); in drm_sched_entity_kill()
266 prev = &s_fence->finished; in drm_sched_entity_kill()
[all …]
/linux/drivers/gpu/drm/xe/
H A Dxe_tlb_inval_job.c277 dma_fence_get(&job->dep.drm.s_fence->finished); in xe_tlb_inval_job_push()
282 &job->dep.drm.s_fence->finished, in xe_tlb_inval_job_push()
293 return &job->dep.drm.s_fence->finished; in xe_tlb_inval_job_push()
H A Dxe_exec.c325 drm_gpuvm_resv_add_fence(&vm->gpuvm, exec, &job->drm.s_fence->finished, in xe_exec_ioctl()
330 xe_sync_entry_signal(&syncs[i], &job->drm.s_fence->finished); in xe_exec_ioctl()
335 xe_exec_queue_last_fence_set(q, vm, &job->drm.s_fence->finished); in xe_exec_ioctl()
H A Dxe_migrate.c1028 fence = dma_fence_get(&job->drm.s_fence->finished); in __xe_migrate_copy()
1449 fence = dma_fence_get(&job->drm.s_fence->finished); in xe_migrate_vram_copy_chunk()
1687 fence = dma_fence_get(&job->drm.s_fence->finished); in xe_migrate_clear()
2006 fence = dma_fence_get(&job->drm.s_fence->finished); in __xe_migrate_update_pgtables()
2292 fence = dma_fence_get(&job->drm.s_fence->finished); in xe_migrate_vram()
H A Dxe_gsc.c96 fence = dma_fence_get(&job->drm.s_fence->finished); in emit_gsc_upload()
H A Dxe_gt.c187 fence = dma_fence_get(&job->drm.s_fence->finished); in emit_job_sync()
H A Dxe_oa.c656 fence = dma_fence_get(&job->drm.s_fence->finished); in xe_oa_submit_bb()
/linux/include/trace/events/
H A Damdxdna.h45 __entry->fence_context = sched_job->s_fence->finished.context;
46 __entry->fence_seqno = sched_job->s_fence->finished.seqno;
/linux/drivers/gpu/drm/etnaviv/
H A Detnaviv_sched.c26 if (likely(!sched_job->s_fence->finished.error)) in etnaviv_sched_run_job()
127 submit->out_fence = dma_fence_get(&submit->sched_job.s_fence->finished); in etnaviv_sched_push_job()
/linux/drivers/gpu/drm/imagination/
H A Dpvr_queue.c487 if (f == &job->base.s_fence->scheduled) in pvr_queue_get_paired_frag_job_dep()
635 &job->paired_job->base.s_fence->scheduled == fence) in pvr_queue_submit_job_to_cccb()
785 WARN_ON(job->base.s_fence->parent); in pvr_queue_start()
786 job->base.s_fence->parent = dma_fence_get(job->done_fence); in pvr_queue_start()
837 job->base.s_fence->parent = dma_fence_get(job->done_fence); in pvr_queue_timedout_job()
1135 return &job->base.s_fence->finished; in pvr_queue_job_arm()
1150 if (job->base.s_fence) in pvr_queue_job_cleanup()
1170 queue->last_queued_job_scheduled_fence = dma_fence_get(&job->base.s_fence->scheduled); in pvr_queue_job_push()
H A Dpvr_sync.c231 struct drm_sched_fence *s_fence = to_drm_sched_fence(uf); in pvr_sync_add_dep_to_job() local
237 dma_fence_get(&s_fence->scheduled)); in pvr_sync_add_dep_to_job()
H A Dpvr_job.c586 dma_resv_add_fence(obj->resv, &job->base.s_fence->finished, usage); in update_job_resvs()
612 &geom_job->base.s_fence->scheduled); in can_combine_jobs()
627 return dma_fence_get(&job->base.s_fence->scheduled); in get_last_queued_job_scheduled_fence()
/linux/drivers/gpu/drm/amd/amdgpu/
H A Damdgpu_ids.c302 r = amdgpu_sync_fence(&(*id)->active, &job->base.s_fence->finished, in amdgpu_vmid_grab_reserved()
362 &job->base.s_fence->finished, in amdgpu_vmid_grab_used()
415 &job->base.s_fence->finished, in amdgpu_vmid_grab()
H A Damdgpu_vm.c820 ring->funcs->emit_cleaner_shader && job->base.s_fence && in amdgpu_vm_flush()
821 &job->base.s_fence->scheduled == isolation->spearhead; in amdgpu_vm_flush()
H A Damdgpu_device.c6749 struct drm_sched_fence *f = job->base.s_fence; in amdgpu_device_enforce_isolation()
/linux/drivers/accel/ethosu/
H A Dethosu_job.c135 job->inference_done_fence = dma_fence_get(&job->base.s_fence->finished); in ethosu_job_push()
190 if (unlikely(job->base.s_fence->finished.error)) in ethosu_job_run()
/linux/drivers/accel/rocket/
H A Drocket_job.c205 job->inference_done_fence = dma_fence_get(&job->base.s_fence->finished); in rocket_job_push()
295 if (unlikely(job->base.s_fence->finished.error)) in rocket_job_run()
/linux/drivers/gpu/drm/panfrost/
H A Dpanfrost_job.c327 job->render_done_fence = dma_fence_get(&job->base.s_fence->finished); in panfrost_job_push()
407 if (unlikely(job->base.s_fence->finished.error)) in panfrost_job_run()
/linux/drivers/gpu/drm/nouveau/
H A Dnouveau_sched.c312 job->done_fence = dma_fence_get(&job->base.s_fence->finished); in nouveau_job_submit()
/linux/drivers/gpu/drm/panthor/
H A Dpanthor_sched.c2738 job->base.s_fence->parent = dma_fence_get(job->done_fence); in queue_start()
3916 if (job->base.s_fence) in job_release()
4046 panthor_vm_update_resvs(job->group->vm, exec, &sched_job->s_fence->finished, in panthor_job_update_resvs()
H A Dpanthor_mmu.c2363 if (job->base.s_fence) in panthor_vm_bind_job_release()
2699 &sched_job->s_fence->finished, in panthor_vm_bind_job_update_resvs()
/linux/drivers/accel/amdxdna/
H A Daie2_ctx.c1093 job->out_fence = dma_fence_get(&job->base.s_fence->finished); in aie2_cmd_submit()
/linux/drivers/gpu/drm/v3d/
H A Dv3d_submit.c224 job->done_fence = dma_fence_get(&job->base.s_fence->finished); in v3d_push_job()