Lines matching full:vc4 in drivers/gpu/drm/vc4/vc4_gem.c (each hit shows the file line number, the matching code, the enclosing function, and a local/argument tag from the indexer).

43 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_queue_hangcheck() local
45 mod_timer(&vc4->hangcheck.timer, in vc4_queue_hangcheck()
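
The two hits above show vc4_queue_hangcheck() (re)arming the hangcheck timer. A minimal sketch of the pattern, assuming a hypothetical 100 ms period (the listing does not show the driver's real interval):

    #include <linux/jiffies.h>
    #include <linux/timer.h>

    #define HANGCHECK_PERIOD_MS 100    /* hypothetical; the driver picks its own */

    static void queue_hangcheck(struct timer_list *timer)
    {
            /*
             * mod_timer() (re)arms the timer whether or not it is already
             * pending, so each newly queued job pushes the deadline out.
             */
            mod_timer(timer,
                      round_jiffies_up(jiffies +
                                       msecs_to_jiffies(HANGCHECK_PERIOD_MS)));
    }
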
75 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_get_hang_state_ioctl() local
80 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_get_hang_state_ioctl()
83 if (!vc4->v3d) { in vc4_get_hang_state_ioctl()
84 DRM_DEBUG("VC4_GET_HANG_STATE with no VC4 V3D probed\n"); in vc4_get_hang_state_ioctl()
88 spin_lock_irqsave(&vc4->job_lock, irqflags); in vc4_get_hang_state_ioctl()
89 kernel_state = vc4->hang_state; in vc4_get_hang_state_ioctl()
91 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_get_hang_state_ioctl()
101 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_get_hang_state_ioctl()
105 vc4->hang_state = NULL; in vc4_get_hang_state_ioctl()
106 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_get_hang_state_ioctl()
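
Lines 88-106 show the hang-state handoff in vc4_get_hang_state_ioctl(): the saved state is read under job_lock and, once the ioctl commits to returning it, the pointer is cleared so a hang is reported to userspace exactly once. A condensed sketch of that take-and-clear idiom, with a hypothetical state type:

    #include <linux/spinlock.h>

    struct hang_state { int placeholder; };    /* stands in for vc4_hang_state */

    static struct hang_state *take_hang_state(struct hang_state **slot,
                                              spinlock_t *lock)
    {
            struct hang_state *state;
            unsigned long irqflags;

            spin_lock_irqsave(lock, irqflags);
            state = *slot;    /* claim whatever the reset worker saved... */
            *slot = NULL;     /* ...so a second ioctl sees no hang state */
            spin_unlock_irqrestore(lock, irqflags);

            return state;     /* may be NULL: no hang recorded */
    }

(The real ioctl is more involved: it drops and retakes the lock while validating the user-supplied buffer sizes, which is why the listing shows three unlock sites.)
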
155 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_save_hang_state() local
169 spin_lock_irqsave(&vc4->job_lock, irqflags); in vc4_save_hang_state()
170 exec[0] = vc4_first_bin_job(vc4); in vc4_save_hang_state()
171 exec[1] = vc4_first_render_job(vc4); in vc4_save_hang_state()
173 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_save_hang_state()
193 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_save_hang_state()
231 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_save_hang_state()
279 spin_lock_irqsave(&vc4->job_lock, irqflags); in vc4_save_hang_state()
280 if (vc4->hang_state) { in vc4_save_hang_state()
281 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_save_hang_state()
284 vc4->hang_state = kernel_state; in vc4_save_hang_state()
285 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_save_hang_state()
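
vc4_save_hang_state() is the producer side: it snapshots the heads of both job queues under job_lock (lines 169-173), builds the CPU-side copy with the lock dropped, and publishes it at lines 279-285 only if no earlier, still-uncollected hang state exists. A sketch of that publish-once step, reusing the hypothetical type from above:

    #include <linux/slab.h>
    #include <linux/spinlock.h>

    struct hang_state { int placeholder; };

    static void publish_hang_state(struct hang_state **slot,
                                   struct hang_state *new_state,
                                   spinlock_t *lock)
    {
            unsigned long irqflags;

            spin_lock_irqsave(lock, irqflags);
            if (*slot) {
                    /* An earlier hang was never fetched; keep it, drop ours. */
                    spin_unlock_irqrestore(lock, irqflags);
                    kfree(new_state);
                    return;
            }
            *slot = new_state;
            spin_unlock_irqrestore(lock, irqflags);
    }
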
292 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_reset() local
296 mutex_lock(&vc4->power_lock); in vc4_reset()
297 if (vc4->power_refcount) { in vc4_reset()
301 pm_runtime_put_sync_suspend(&vc4->v3d->pdev->dev); in vc4_reset()
302 pm_runtime_get_sync(&vc4->v3d->pdev->dev); in vc4_reset()
304 mutex_unlock(&vc4->power_lock); in vc4_reset()
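
vc4_reset() (lines 292-304) resets V3D by bouncing it through runtime PM under power_lock: a synchronous suspend immediately followed by a synchronous resume, done only when a power reference is actually held. A sketch, assuming hypothetical parameter names:

    #include <linux/mutex.h>
    #include <linux/pm_runtime.h>

    static void reset_via_runtime_pm(struct device *v3d_dev,
                                     struct mutex *power_lock,
                                     int *power_refcount)
    {
            mutex_lock(power_lock);

            /*
             * Only power-cycle if the block is up; the refcount stays held
             * across the cycle, so V3D comes back up before we return.
             */
            if (*power_refcount) {
                    pm_runtime_put_sync_suspend(v3d_dev);
                    pm_runtime_get_sync(v3d_dev);
            }

            mutex_unlock(power_lock);
    }
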
318 struct vc4_dev *vc4 = in vc4_reset_work() local
321 vc4_save_hang_state(&vc4->base); in vc4_reset_work()
323 vc4_reset(&vc4->base); in vc4_reset_work()
329 struct vc4_dev *vc4 = timer_container_of(vc4, t, hangcheck.timer); in vc4_hangcheck_elapsed() local
330 struct drm_device *dev = &vc4->base; in vc4_hangcheck_elapsed()
335 spin_lock_irqsave(&vc4->job_lock, irqflags); in vc4_hangcheck_elapsed()
337 bin_exec = vc4_first_bin_job(vc4); in vc4_hangcheck_elapsed()
338 render_exec = vc4_first_render_job(vc4); in vc4_hangcheck_elapsed()
342 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_hangcheck_elapsed()
358 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_hangcheck_elapsed()
363 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_hangcheck_elapsed()
369 schedule_work(&vc4->hangcheck.reset_work); in vc4_hangcheck_elapsed()
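
vc4_hangcheck_elapsed() is the timer callback: timer_container_of() (the current spelling; older trees spell it from_timer()) recovers the device from the timer_list pointer, the queue heads are inspected under job_lock (lines 335-342), and if neither engine made progress the reset is deferred to a workqueue at line 369, since a full reset cannot run in timer (softirq) context. A sketch of that shape, with the progress check elided:

    #include <linux/timer.h>
    #include <linux/workqueue.h>

    struct hangcheck_dev {
            struct timer_list timer;
            struct work_struct reset_work;
            /* ... job lists and last-observed progress counters ... */
    };

    static void hangcheck_elapsed(struct timer_list *t)
    {
            struct hangcheck_dev *dev = timer_container_of(dev, t, timer);

            /*
             * Under the job lock: if jobs are queued but the hardware
             * counters moved, re-arm the timer and return (elided here).
             */

            /* No progress: hand the heavyweight reset to process context. */
            schedule_work(&dev->reset_work);
    }
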
375 struct vc4_dev *vc4 = to_vc4_dev(dev); in submit_cl() local
388 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_wait_for_seqno() local
393 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_wait_for_seqno()
396 if (vc4->finished_seqno >= seqno) in vc4_wait_for_seqno()
406 prepare_to_wait(&vc4->job_wait_queue, &wait, in vc4_wait_for_seqno()
415 if (vc4->finished_seqno >= seqno) in vc4_wait_for_seqno()
429 finish_wait(&vc4->job_wait_queue, &wait); in vc4_wait_for_seqno()
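
vc4_wait_for_seqno() (lines 388-429) is a classic waitqueue loop: a lock-free fast path, then prepare_to_wait()/schedule_timeout()/finish_wait() with the condition re-checked after every wakeup. A reduced sketch, assuming a hypothetical device with a finished-seqno counter:

    #include <linux/errno.h>
    #include <linux/sched/signal.h>
    #include <linux/wait.h>

    struct seqno_dev {
            wait_queue_head_t job_wait_queue;
            u64 finished_seqno;    /* advanced by the job-done path */
    };

    static int wait_for_seqno(struct seqno_dev *dev, u64 seqno,
                              unsigned long timeout)
    {
            DEFINE_WAIT(wait);
            int ret = 0;

            if (dev->finished_seqno >= seqno)    /* fast path, no sleep */
                    return 0;

            for (;;) {
                    prepare_to_wait(&dev->job_wait_queue, &wait,
                                    TASK_INTERRUPTIBLE);

                    /* Re-check after queuing so a concurrent wakeup isn't lost. */
                    if (dev->finished_seqno >= seqno)
                            break;
                    if (signal_pending(current)) {
                            ret = -ERESTARTSYS;
                            break;
                    }
                    timeout = schedule_timeout(timeout);
                    if (!timeout) {
                            ret = -ETIME;
                            break;
                    }
            }

            finish_wait(&dev->job_wait_queue, &wait);
            return ret;
    }
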
438 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_flush_caches() local
457 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_flush_texture_caches() local
475 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_submit_next_bin_job() local
478 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_submit_next_bin_job()
482 exec = vc4_first_bin_job(vc4); in vc4_submit_next_bin_job()
491 if (exec->perfmon && vc4->active_perfmon != exec->perfmon) in vc4_submit_next_bin_job()
492 vc4_perfmon_start(vc4, exec->perfmon); in vc4_submit_next_bin_job()
505 next = vc4_first_bin_job(vc4); in vc4_submit_next_bin_job()
520 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_submit_next_render_job() local
521 struct vc4_exec_info *exec = vc4_first_render_job(vc4); in vc4_submit_next_render_job()
526 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_submit_next_render_job()
544 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_move_job_to_render() local
545 bool was_empty = list_empty(&vc4->render_job_list); in vc4_move_job_to_render()
547 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_move_job_to_render()
550 list_move_tail(&exec->head, &vc4->render_job_list); in vc4_move_job_to_render()
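
Lines 544-550 show the bin-to-render handoff: when binning completes, the exec moves to the tail of render_job_list with list_move_tail(), and the render engine only needs an explicit kick when its list was empty, because otherwise it is busy and will pick the job up when the current one finishes. A sketch, with a hypothetical kick helper:

    #include <linux/list.h>

    struct exec_info {
            struct list_head head;    /* links the job into exactly one queue */
    };

    static void kick_render_engine(void) { /* hypothetical hardware kick */ }

    /* Caller holds the job lock. */
    static void move_job_to_render(struct list_head *render_job_list,
                                   struct exec_info *exec)
    {
            bool was_empty = list_empty(render_job_list);

            /*
             * list_move_tail() unlinks from the old queue and appends, so
             * the job is never on both queues at once.
             */
            list_move_tail(&exec->head, render_job_list);

            if (was_empty)
                    kick_render_engine();
    }
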
579 * to vc4, so we don't attach dma-buf fences to them.
618 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_queue_submit() local
629 spin_lock_irqsave(&vc4->job_lock, irqflags); in vc4_queue_submit()
631 seqno = ++vc4->emit_seqno; in vc4_queue_submit()
634 dma_fence_init(&fence->base, &vc4_fence_ops, &vc4->job_lock, in vc4_queue_submit()
635 vc4->dma_fence_context, exec->seqno); in vc4_queue_submit()
646 list_add_tail(&exec->head, &vc4->bin_job_list); in vc4_queue_submit()
653 renderjob = vc4_first_render_job(vc4); in vc4_queue_submit()
654 if (vc4_first_bin_job(vc4) == exec && in vc4_queue_submit()
660 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_queue_submit()
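
vc4_queue_submit() assigns the job's seqno and initializes its dma_fence in one critical section (lines 629-635): the fence shares the device-wide fence context allocated once at init (line 1165, below) and uses job_lock as its fence lock. A sketch with hypothetical fence ops:

    #include <linux/dma-fence.h>
    #include <linux/spinlock.h>

    struct submit_dev {
            spinlock_t job_lock;
            u64 dma_fence_context;    /* from dma_fence_context_alloc(1) */
            u64 emit_seqno;
    };

    static const char *sketch_driver_name(struct dma_fence *f)
    {
            return "sketch";
    }

    static const char *sketch_timeline_name(struct dma_fence *f)
    {
            return "gem";
    }

    static const struct dma_fence_ops sketch_fence_ops = {
            .get_driver_name    = sketch_driver_name,
            .get_timeline_name  = sketch_timeline_name,
    };

    /* Assign the next seqno and bind the fence to it, under job_lock. */
    static u64 emit_fence(struct submit_dev *dev, struct dma_fence *fence)
    {
            unsigned long irqflags;
            u64 seqno;

            spin_lock_irqsave(&dev->job_lock, irqflags);

            seqno = ++dev->emit_seqno;    /* monotonic per-device sequence */
            dma_fence_init(fence, &sketch_fence_ops, &dev->job_lock,
                           dev->dma_fence_context, seqno);

            spin_unlock_irqrestore(&dev->job_lock, irqflags);
            return seqno;
    }
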
735 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_get_bcl() local
835 ret = vc4_v3d_bin_bo_get(vc4, &exec->bin_bo_used); in vc4_get_bcl()
848 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_complete_exec() local
878 spin_lock_irqsave(&vc4->job_lock, irqflags); in vc4_complete_exec()
879 vc4->bin_alloc_used &= ~exec->bin_slots; in vc4_complete_exec()
880 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_complete_exec()
884 vc4_v3d_bin_bo_put(vc4); in vc4_complete_exec()
889 vc4_v3d_pm_put(vc4); in vc4_complete_exec()
895 vc4_job_handle_completed(struct vc4_dev *vc4) in vc4_job_handle_completed() argument
899 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_job_handle_completed()
902 spin_lock_irqsave(&vc4->job_lock, irqflags); in vc4_job_handle_completed()
903 while (!list_empty(&vc4->job_done_list)) { in vc4_job_handle_completed()
905 list_first_entry(&vc4->job_done_list, in vc4_job_handle_completed()
909 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_job_handle_completed()
910 vc4_complete_exec(&vc4->base, exec); in vc4_job_handle_completed()
911 spin_lock_irqsave(&vc4->job_lock, irqflags); in vc4_job_handle_completed()
914 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_job_handle_completed()
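
vc4_job_handle_completed() (lines 895-914) drains job_done_list with the standard "drop the lock around per-item work" loop: pop the head while locked, unlock for the completion call (which may sleep), then relock before testing list_empty() again. A sketch:

    #include <linux/list.h>
    #include <linux/slab.h>
    #include <linux/spinlock.h>

    struct done_job {
            struct list_head head;
    };

    static void complete_job(struct done_job *job)
    {
            kfree(job);    /* stand-in for the real completion work */
    }

    static void handle_completed(struct list_head *done_list, spinlock_t *lock)
    {
            unsigned long irqflags;

            spin_lock_irqsave(lock, irqflags);
            while (!list_empty(done_list)) {
                    struct done_job *job =
                            list_first_entry(done_list, struct done_job, head);

                    list_del(&job->head);

                    /*
                     * complete_job() may sleep, so it runs unlocked; the loop
                     * condition is only re-evaluated with the lock held.
                     */
                    spin_unlock_irqrestore(lock, irqflags);
                    complete_job(job);
                    spin_lock_irqsave(lock, irqflags);
            }
            spin_unlock_irqrestore(lock, irqflags);
    }
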
924 struct vc4_dev *vc4 = in vc4_job_done_work() local
927 vc4_job_handle_completed(vc4); in vc4_job_done_work()
952 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_wait_seqno_ioctl() local
955 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_wait_seqno_ioctl()
966 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_wait_bo_ioctl() local
974 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_wait_bo_ioctl()
996 * vc4_submit_cl_ioctl() - Submits a job (frame) to the VC4.
1011 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_submit_cl_ioctl() local
1024 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_submit_cl_ioctl()
1027 if (!vc4->v3d) { in vc4_submit_cl_ioctl()
1028 DRM_DEBUG("VC4_SUBMIT_CL with no VC4 V3D probed\n"); in vc4_submit_cl_ioctl()
1049 exec->dev = vc4; in vc4_submit_cl_ioctl()
1051 ret = vc4_v3d_pm_get(vc4); in vc4_submit_cl_ioctl()
1085 vc4->dma_fence_context)) { in vc4_submit_cl_ioctl()
1144 args->seqno = vc4->emit_seqno; in vc4_submit_cl_ioctl()
1151 vc4_complete_exec(&vc4->base, exec); in vc4_submit_cl_ioctl()
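
Nearly every entry point in this listing opens with the same two guards, visible together at lines 1024-1028: a WARN_ON_ONCE() that refuses these legacy paths on anything newer than VC4_GEN_4, and a NULL check because V3D is an optional component that may never have probed. A hypothetical mirror of that pattern:

    #include <linux/bug.h>
    #include <linux/errno.h>
    #include <drm/drm_print.h>

    #define MY_GEN_4 4    /* hypothetical stand-in for VC4_GEN_4 */

    struct my_dev {
            unsigned int gen;
            void *v3d;    /* NULL until the V3D component binds */
    };

    static int check_v3d(struct my_dev *dev)
    {
            /* These legacy paths only exist on gen-4 and older parts. */
            if (WARN_ON_ONCE(dev->gen > MY_GEN_4))
                    return -ENODEV;

            /* V3D may never have probed at all. */
            if (!dev->v3d) {
                    DRM_DEBUG("ioctl with no V3D probed\n");
                    return -ENODEV;
            }

            return 0;
    }
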
1159 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_gem_init() local
1162 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_gem_init()
1165 vc4->dma_fence_context = dma_fence_context_alloc(1); in vc4_gem_init()
1167 INIT_LIST_HEAD(&vc4->bin_job_list); in vc4_gem_init()
1168 INIT_LIST_HEAD(&vc4->render_job_list); in vc4_gem_init()
1169 INIT_LIST_HEAD(&vc4->job_done_list); in vc4_gem_init()
1170 spin_lock_init(&vc4->job_lock); in vc4_gem_init()
1172 INIT_WORK(&vc4->hangcheck.reset_work, vc4_reset_work); in vc4_gem_init()
1173 timer_setup(&vc4->hangcheck.timer, vc4_hangcheck_elapsed, 0); in vc4_gem_init()
1175 INIT_WORK(&vc4->job_done_work, vc4_job_done_work); in vc4_gem_init()
1177 ret = drmm_mutex_init(dev, &vc4->power_lock); in vc4_gem_init()
1181 INIT_LIST_HEAD(&vc4->purgeable.list); in vc4_gem_init()
1183 ret = drmm_mutex_init(dev, &vc4->purgeable.lock); in vc4_gem_init()
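
vc4_gem_init() (lines 1159-1183) ties the pieces together: one fence context, three job lists sharing one spinlock, the hangcheck work + timer, the job-done work, and drm-managed mutexes (drmm_mutex_init() frees them with the drm_device). A sketch of the same initialization shape with hypothetical names, minus the managed mutexes:

    #include <linux/dma-fence.h>
    #include <linux/list.h>
    #include <linux/spinlock.h>
    #include <linux/timer.h>
    #include <linux/workqueue.h>

    struct gem_dev {
            u64 dma_fence_context;
            struct list_head bin_job_list, render_job_list, job_done_list;
            spinlock_t job_lock;
            struct work_struct reset_work, job_done_work;
            struct timer_list hangcheck_timer;
    };

    static void reset_work_fn(struct work_struct *w) { /* reset + requeue */ }
    static void job_done_work_fn(struct work_struct *w) { /* drain done list */ }
    static void hangcheck_fn(struct timer_list *t) { /* progress check */ }

    static void gem_init(struct gem_dev *dev)
    {
            /* One context, so all fences from this device share a timeline. */
            dev->dma_fence_context = dma_fence_context_alloc(1);

            INIT_LIST_HEAD(&dev->bin_job_list);
            INIT_LIST_HEAD(&dev->render_job_list);
            INIT_LIST_HEAD(&dev->job_done_list);
            spin_lock_init(&dev->job_lock);

            INIT_WORK(&dev->reset_work, reset_work_fn);
            timer_setup(&dev->hangcheck_timer, hangcheck_fn, 0);
            INIT_WORK(&dev->job_done_work, job_done_work_fn);
    }
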
1192 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_gem_destroy() local
1197 WARN_ON(vc4->emit_seqno != vc4->finished_seqno); in vc4_gem_destroy()
1202 if (vc4->bin_bo) { in vc4_gem_destroy()
1203 drm_gem_object_put(&vc4->bin_bo->base.base); in vc4_gem_destroy()
1204 vc4->bin_bo = NULL; in vc4_gem_destroy()
1207 if (vc4->hang_state) in vc4_gem_destroy()
1208 vc4_free_hang_state(dev, vc4->hang_state); in vc4_gem_destroy()
1214 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_gem_madvise_ioctl() local
1220 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_gem_madvise_ioctl()