
Searched refs: drm_sched_entity (Results 1 – 25 of 25) sorted by relevance

/linux/include/drm/
gpu_scheduler.h
82 struct drm_sched_entity { struct
256 struct drm_sched_entity *current_entity; argument
358 struct drm_sched_entity *entity;
424 struct drm_sched_entity *s_entity);
656 struct drm_sched_entity *entity,
686 int drm_sched_entity_init(struct drm_sched_entity *entity,
691 long drm_sched_entity_flush(struct drm_sched_entity *entity, long timeout);
692 void drm_sched_entity_fini(struct drm_sched_entity *entity);
693 void drm_sched_entity_destroy(struct drm_sched_entity *entity);
694 void drm_sched_entity_set_priority(struct drm_sched_entity *entity,
[all …]
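
The declarations above are the public entity API from include/drm/gpu_scheduler.h. A minimal lifecycle sketch, assuming the mainline drm_sched_entity_init() signature (priority, scheduler list, optional guilty counter) and a hypothetical my_dev driver struct:

#include <drm/gpu_scheduler.h>

struct my_dev {
	struct drm_gpu_scheduler sched;		/* driver-owned scheduler instance */
	struct drm_sched_entity entity;		/* one submission queue feeding it */
};

static int my_dev_entity_setup(struct my_dev *dev)
{
	struct drm_gpu_scheduler *sched_list[] = { &dev->sched };

	/* Bind the entity to a single scheduler at NORMAL priority, no guilty counter. */
	return drm_sched_entity_init(&dev->entity, DRM_SCHED_PRIORITY_NORMAL,
				     sched_list, ARRAY_SIZE(sched_list), NULL);
}

static void my_dev_entity_teardown(struct my_dev *dev)
{
	/* drm_sched_entity_destroy() flushes queued jobs, then finalizes the entity. */
	drm_sched_entity_destroy(&dev->entity);
}

drm_sched_entity_set_priority() can later move the entity between run queues without reinitializing it.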
/linux/drivers/gpu/drm/scheduler/
sched_entity.c
58 int drm_sched_entity_init(struct drm_sched_entity *entity, in drm_sched_entity_init()
67 memset(entity, 0, sizeof(struct drm_sched_entity)); in drm_sched_entity_init()
129 void drm_sched_entity_modify_sched(struct drm_sched_entity *entity, in drm_sched_entity_modify_sched()
142 static bool drm_sched_entity_is_idle(struct drm_sched_entity *entity) in drm_sched_entity_is_idle()
161 int drm_sched_entity_error(struct drm_sched_entity *entity) in drm_sched_entity_error()
227 static void drm_sched_entity_kill(struct drm_sched_entity *entity) in drm_sched_entity_kill()
278 long drm_sched_entity_flush(struct drm_sched_entity *entity, long timeout) in drm_sched_entity_flush()
324 void drm_sched_entity_fini(struct drm_sched_entity *entity) in drm_sched_entity_fini()
351 void drm_sched_entity_destroy(struct drm_sched_entity *entity) in drm_sched_entity_destroy()
365 struct drm_sched_entity *entity = in drm_sched_entity_wakeup()
[all …]
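
sched_entity.c splits teardown into drm_sched_entity_flush() and drm_sched_entity_fini() so a driver can wait for already-queued work at file-close time and free the entity later. A hedged sketch of that pattern, with a hypothetical my_file_priv and an arbitrary 2-second timeout:

#include <drm/gpu_scheduler.h>
#include <linux/slab.h>

struct my_file_priv {
	struct drm_sched_entity entity;
};

static void my_driver_postclose(struct my_file_priv *fpriv)
{
	/* Give already-queued jobs a bounded chance to be pushed to the hardware. */
	drm_sched_entity_flush(&fpriv->entity, msecs_to_jiffies(2000));
}

static void my_file_priv_free(struct my_file_priv *fpriv)
{
	/* Removes the entity from its run queue; anything still queued is killed. */
	drm_sched_entity_fini(&fpriv->entity);
	kfree(fpriv);
}

drm_sched_entity_destroy() is the convenience wrapper that performs both steps back to back.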
sched_main.c
116 struct drm_sched_entity *entity) in drm_sched_can_queue()
139 struct drm_sched_entity *ent_a = rb_entry((a), struct drm_sched_entity, rb_tree_node); in drm_sched_entity_compare_before()
140 struct drm_sched_entity *ent_b = rb_entry((b), struct drm_sched_entity, rb_tree_node); in drm_sched_entity_compare_before()
145 static void drm_sched_rq_remove_fifo_locked(struct drm_sched_entity *entity, in drm_sched_rq_remove_fifo_locked()
154 void drm_sched_rq_update_fifo_locked(struct drm_sched_entity *entity, in drm_sched_rq_update_fifo_locked()
201 struct drm_sched_entity *entity) in drm_sched_rq_add_entity()
222 struct drm_sched_entity *entity) in drm_sched_rq_remove_entity()
255 static struct drm_sched_entity *
259 struct drm_sched_entity *entity; in drm_sched_rq_select_entity_rr()
312 static struct drm_sched_entity *
[all …]
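
The rb_entry()/rb_tree_node references above come from the FIFO selection policy in sched_main.c: each run queue keeps its entities in a cached rbtree keyed on the submit timestamp of their oldest pending job. A sketch of that ordering, reproducing the mainline field names from memory (treat them as illustrative):

#include <linux/rbtree.h>
#include <linux/ktime.h>
#include <drm/gpu_scheduler.h>

static bool entity_compare_before(struct rb_node *a, const struct rb_node *b)
{
	struct drm_sched_entity *ent_a = rb_entry(a, struct drm_sched_entity, rb_tree_node);
	struct drm_sched_entity *ent_b = rb_entry(b, struct drm_sched_entity, rb_tree_node);

	/* The entity whose oldest queued job was submitted first sorts leftmost. */
	return ktime_before(ent_a->oldest_job_waiting, ent_b->oldest_job_waiting);
}

static void rq_requeue_fifo(struct drm_sched_rq *rq, struct drm_sched_entity *entity)
{
	/* The leftmost (longest-waiting) entity is the next one the scheduler picks. */
	rb_add_cached(&entity->rb_tree_node, &rq->rb_tree_root, entity_compare_before);
}

The FIFO selector then starts from rb_first_cached() (the longest-waiting entity) instead of walking the list used by drm_sched_rq_select_entity_rr().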
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_ctx.h
41 struct drm_sched_entity entity;
77 u32 ring, struct drm_sched_entity **entity);
79 struct drm_sched_entity *entity,
82 struct drm_sched_entity *entity,
91 struct drm_sched_entity *entity);
amdgpu_job.h
93 struct drm_sched_entity *entity, void *owner,
97 struct drm_sched_entity *entity, void *owner,
amdgpu_job.c
185 struct drm_sched_entity *entity, void *owner, in amdgpu_job_alloc()
210 struct drm_sched_entity *entity, void *owner, in amdgpu_job_alloc_with_ib()
341 struct drm_sched_entity *s_entity) in amdgpu_job_prepare_job()
423 drm_sched_entity_queue_pop(struct drm_sched_entity *entity) in drm_sched_entity_queue_pop()
437 struct drm_sched_entity *s_entity = NULL; in amdgpu_job_stop_all_jobs_on_sched()
amdgpu_ctx.c
435 u32 ring, struct drm_sched_entity **entity) in amdgpu_ctx_get_entity()
438 struct drm_sched_entity *ctx_entity; in amdgpu_ctx_get_entity()
758 struct drm_sched_entity *entity, in amdgpu_ctx_add_fence()
785 struct drm_sched_entity *entity, in amdgpu_ctx_get_fence()
860 struct drm_sched_entity *entity) in amdgpu_ctx_wait_prev_fence()
908 struct drm_sched_entity *entity; in amdgpu_ctx_mgr_entity_flush()
938 struct drm_sched_entity *entity; in amdgpu_ctx_mgr_entity_fini()
amdgpu_cs.h
61 struct drm_sched_entity *entities[AMDGPU_CS_GANG_SIZE];
amdgpu_vce.h
51 struct drm_sched_entity entity;
amdgpu_uvd.h
65 struct drm_sched_entity entity;
amdgpu_vm_sdma.c
47 struct drm_sched_entity *entity = p->immediate ? &p->vm->immediate in amdgpu_vm_sdma_alloc_job()
amdgpu_cs.c
76 struct drm_sched_entity *entity; in amdgpu_cs_job_idx()
422 struct drm_sched_entity *entity; in amdgpu_cs_p2_dependencies()
1133 struct drm_sched_entity *entity = p->entities[i]; in amdgpu_cs_vm_handling()
1518 struct drm_sched_entity *entity; in amdgpu_cs_wait_ioctl()
1566 struct drm_sched_entity *entity; in amdgpu_cs_get_fence()
amdgpu_gfx.c
1453 struct drm_sched_entity entity; in amdgpu_gfx_run_cleaner_shader_job()
/linux/drivers/gpu/drm/scheduler/tests/
sched_tests.h
70 struct drm_sched_entity base;
121 drm_sched_entity_to_mock_entity(struct drm_sched_entity *sched_entity) in drm_sched_entity_to_mock_entity()
/linux/drivers/gpu/drm/xe/
xe_gpu_scheduler_types.h
54 #define xe_sched_entity drm_sched_entity
xe_execlist_types.h
39 struct drm_sched_entity entity;
/linux/drivers/gpu/drm/msm/
msm_gpu.h
455 struct drm_sched_entity *entities[NR_SCHED_PRIORITIES * MSM_GPU_MAX_RINGS];
567 struct drm_sched_entity *entity;
570 struct drm_sched_entity _vm_bind_entity[0];
/linux/drivers/gpu/drm/nouveau/
nouveau_sched.h
101 struct drm_sched_entity entity;
nouveau_sched.c
407 struct drm_sched_entity *entity = &sched->entity; in nouveau_sched_init()
490 struct drm_sched_entity *entity = &sched->entity; in nouveau_sched_fini()
/linux/drivers/gpu/drm/etnaviv/
etnaviv_drv.h
34 struct drm_sched_entity sched_entity[ETNA_MAX_PIPES];
/linux/drivers/gpu/drm/v3d/
v3d_drv.h
232 struct drm_sched_entity sched_entity[V3D_MAX_QUEUES];
/linux/drivers/gpu/drm/panfrost/
panfrost_job.c
949 struct drm_sched_entity *entity = &panfrost_priv->sched_entity[i]; in panfrost_job_close()
/linux/drivers/gpu/drm/imagination/
pvr_queue.c
506 struct drm_sched_entity *s_entity) in pvr_queue_prepare_job()
/linux/drivers/gpu/drm/panthor/
panthor_mmu.c
259 struct drm_sched_entity entity;
panthor_sched.c
361 struct drm_sched_entity entity;