Lines matching full:vc4 (every hit falls in the binner-overflow and V3D interrupt-handling paths of the vc4 DRM driver)
63 struct vc4_dev *vc4 = in vc4_overflow_mem_work() local
70 mutex_lock(&vc4->bin_bo_lock); in vc4_overflow_mem_work()
72 if (!vc4->bin_bo) in vc4_overflow_mem_work()
75 bo = vc4->bin_bo; in vc4_overflow_mem_work()
77 bin_bo_slot = vc4_v3d_get_bin_slot(vc4); in vc4_overflow_mem_work()
79 drm_err(&vc4->base, "Couldn't allocate binner overflow mem\n"); in vc4_overflow_mem_work()
83 spin_lock_irqsave(&vc4->job_lock, irqflags); in vc4_overflow_mem_work()
85 if (vc4->bin_alloc_overflow) { in vc4_overflow_mem_work()
92 exec = vc4_first_bin_job(vc4); in vc4_overflow_mem_work()
94 exec = vc4_last_render_job(vc4); in vc4_overflow_mem_work()
96 exec->bin_slots |= vc4->bin_alloc_overflow; in vc4_overflow_mem_work()
101 vc4->bin_alloc_used &= ~vc4->bin_alloc_overflow; in vc4_overflow_mem_work()
104 vc4->bin_alloc_overflow = BIT(bin_bo_slot); in vc4_overflow_mem_work()
106 V3D_WRITE(V3D_BPOA, bo->base.dma_addr + bin_bo_slot * vc4->bin_alloc_size); in vc4_overflow_mem_work()
110 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_overflow_mem_work()
113 mutex_unlock(&vc4->bin_bo_lock); in vc4_overflow_mem_work()
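The hits at lines 63-113 trace almost the whole binner-overflow worker. A rough reconstruction is sketched below; like all the sketches in this section it assumes the driver's internal headers and structures (vc4_drv.h, vc4_regs.h), so it only builds inside the driver tree, and everything not shown in the listing (the goto label, the V3D_BPOS/V3D_INTCTL/V3D_INTENA writes) is an assumption taken from the upstream vc4 driver rather than from these results.

/* Sketch of the binner-overflow worker, reconstructed around the hits above. */
static void vc4_overflow_mem_work(struct work_struct *work)
{
        struct vc4_dev *vc4 =
                container_of(work, struct vc4_dev, overflow_mem_work);
        struct vc4_bo *bo;
        int bin_bo_slot;
        struct vc4_exec_info *exec;
        unsigned long irqflags;

        mutex_lock(&vc4->bin_bo_lock);

        if (!vc4->bin_bo)
                goto complete;
        bo = vc4->bin_bo;

        bin_bo_slot = vc4_v3d_get_bin_slot(vc4);
        if (bin_bo_slot < 0) {
                drm_err(&vc4->base, "Couldn't allocate binner overflow mem\n");
                goto complete;
        }

        spin_lock_irqsave(&vc4->job_lock, irqflags);

        if (vc4->bin_alloc_overflow) {
                /* The previous overflow slot is freed when the job that used
                 * it retires; if nothing is queued, free it right away.
                 */
                exec = vc4_first_bin_job(vc4);
                if (!exec)
                        exec = vc4_last_render_job(vc4);
                if (exec)
                        exec->bin_slots |= vc4->bin_alloc_overflow;
                else
                        vc4->bin_alloc_used &= ~vc4->bin_alloc_overflow;
        }
        vc4->bin_alloc_overflow = BIT(bin_bo_slot);

        /* Point the binner at the new overflow slot and re-arm OUTOMEM. */
        V3D_WRITE(V3D_BPOA, bo->base.dma_addr + bin_bo_slot * vc4->bin_alloc_size);
        V3D_WRITE(V3D_BPOS, vc4->bin_alloc_size);       /* assumed */
        V3D_WRITE(V3D_INTCTL, V3D_INT_OUTOMEM);         /* assumed */
        V3D_WRITE(V3D_INTENA, V3D_INT_OUTOMEM);         /* assumed */

        spin_unlock_irqrestore(&vc4->job_lock, irqflags);

complete:
        mutex_unlock(&vc4->bin_bo_lock);
}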
119 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_irq_finish_bin_job() local
120 struct vc4_exec_info *next, *exec = vc4_first_bin_job(vc4); in vc4_irq_finish_bin_job()
128 next = vc4_first_bin_job(vc4); in vc4_irq_finish_bin_job()
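Only three hits (119-128) land in vc4_irq_finish_bin_job(), so most of the body below is filled in from the upstream driver and should be read as an assumption, in particular vc4_move_job_to_render() and vc4_submit_next_bin_job():

/* Sketch of the bin-complete path. */
static void vc4_irq_finish_bin_job(struct drm_device *dev)
{
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        struct vc4_exec_info *next, *exec = vc4_first_bin_job(vc4);

        if (!exec)
                return;

        /* Hand the finished job over to the render stage (assumed call). */
        vc4_move_job_to_render(dev, exec);
        next = vc4_first_bin_job(vc4);

        /* Kick the next bin job if it can run under the same perfmon
         * (condition and submit call assumed).
         */
        if (next && next->perfmon == exec->perfmon)
                vc4_submit_next_bin_job(dev);
}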
141 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_cancel_bin_job() local
142 struct vc4_exec_info *exec = vc4_first_bin_job(vc4); in vc4_cancel_bin_job()
149 vc4_perfmon_stop(vc4, exec->perfmon, false); in vc4_cancel_bin_job()
151 list_move_tail(&exec->head, &vc4->bin_job_list); in vc4_cancel_bin_job()
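The hits at 141-151 cover nearly all of vc4_cancel_bin_job(); only the trailing resubmission via vc4_submit_next_bin_job() is assumed:

/* Sketch: after a GPU reset, the in-flight bin job is requeued, not completed. */
static void vc4_cancel_bin_job(struct drm_device *dev)
{
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        struct vc4_exec_info *exec = vc4_first_bin_job(vc4);

        if (!exec)
                return;

        /* Stop the perfmon so the next bin job can start cleanly. */
        if (exec->perfmon)
                vc4_perfmon_stop(vc4, exec->perfmon, false);

        /* Put the job back on the bin queue and resubmit (assumed call). */
        list_move_tail(&exec->head, &vc4->bin_job_list);
        vc4_submit_next_bin_job(dev);
}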
158 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_irq_finish_render_job() local
159 struct vc4_exec_info *exec = vc4_first_render_job(vc4); in vc4_irq_finish_render_job()
167 vc4->finished_seqno++; in vc4_irq_finish_render_job()
168 list_move_tail(&exec->head, &vc4->job_done_list); in vc4_irq_finish_render_job()
170 nextbin = vc4_first_bin_job(vc4); in vc4_irq_finish_render_job()
171 nextrender = vc4_first_render_job(vc4); in vc4_irq_finish_render_job()
178 vc4_perfmon_stop(vc4, exec->perfmon, true); in vc4_irq_finish_render_job()
198 wake_up_all(&vc4->job_wait_queue); in vc4_irq_finish_render_job()
199 schedule_work(&vc4->job_done_work); in vc4_irq_finish_render_job()
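Lines 158-199 walk through the render-completion path. A hedged sketch follows; the fence signalling and the perfmon/binner-unblock logic between the matched lines are assumptions from the upstream driver:

/* Sketch of the render-complete path. */
static void vc4_irq_finish_render_job(struct drm_device *dev)
{
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        struct vc4_exec_info *exec = vc4_first_render_job(vc4);
        struct vc4_exec_info *nextbin, *nextrender;

        if (!exec)
                return;

        /* Retire the job. */
        vc4->finished_seqno++;
        list_move_tail(&exec->head, &vc4->job_done_list);

        nextbin = vc4_first_bin_job(vc4);
        nextrender = vc4_first_render_job(vc4);

        /* Only stop the perfmon if no queued job still expects it (assumed). */
        if (exec->perfmon && !nextrender &&
            (!nextbin || nextbin->perfmon != exec->perfmon))
                vc4_perfmon_stop(vc4, exec->perfmon, true);

        /* Start the next render job, or unblock a binner held back by a
         * perfmon mismatch (assumed).
         */
        if (nextrender)
                vc4_submit_next_render_job(dev);
        else if (nextbin && nextbin->perfmon != exec->perfmon)
                vc4_submit_next_bin_job(dev);

        /* Signal the job's fence under job_lock (assumed). */
        if (exec->fence) {
                dma_fence_signal_locked(exec->fence);
                dma_fence_put(exec->fence);
                exec->fence = NULL;
        }

        /* Wake seqno waiters and defer job cleanup to a workqueue. */
        wake_up_all(&vc4->job_wait_queue);
        schedule_work(&vc4->job_done_work);
}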
206 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_irq() local
223 schedule_work(&vc4->overflow_mem_work); in vc4_irq()
228 spin_lock(&vc4->job_lock); in vc4_irq()
230 spin_unlock(&vc4->job_lock); in vc4_irq()
235 spin_lock(&vc4->job_lock); in vc4_irq()
237 spin_unlock(&vc4->job_lock); in vc4_irq()
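In the top-level handler only the schedule_work() call (223) and the two job_lock critical sections (228-237) are matched; the V3D_INTCTL read/ack and the per-bit dispatch around them are assumptions in this sketch:

/* Sketch of the top-level V3D interrupt handler. */
static irqreturn_t vc4_irq(int irq, void *arg)
{
        struct drm_device *dev = arg;
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        uint32_t intctl;
        irqreturn_t status = IRQ_NONE;

        /* Read and acknowledge the pending bits (registers assumed). */
        intctl = V3D_READ(V3D_INTCTL);
        V3D_WRITE(V3D_INTCTL, intctl);

        if (intctl & V3D_INT_OUTOMEM) {
                /* Mask OUTOMEM until the worker has supplied more memory. */
                V3D_WRITE(V3D_INTDIS, V3D_INT_OUTOMEM);
                schedule_work(&vc4->overflow_mem_work);
                status = IRQ_HANDLED;
        }

        if (intctl & V3D_INT_FLDONE) {
                spin_lock(&vc4->job_lock);
                vc4_irq_finish_bin_job(dev);
                spin_unlock(&vc4->job_lock);
                status = IRQ_HANDLED;
        }

        if (intctl & V3D_INT_FRDONE) {
                spin_lock(&vc4->job_lock);
                vc4_irq_finish_render_job(dev);
                spin_unlock(&vc4->job_lock);
                status = IRQ_HANDLED;
        }

        return status;
}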
247 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_irq_prepare() local
249 if (!vc4->v3d) in vc4_irq_prepare()
252 init_waitqueue_head(&vc4->job_wait_queue); in vc4_irq_prepare()
253 INIT_WORK(&vc4->overflow_mem_work, vc4_overflow_mem_work); in vc4_irq_prepare()
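vc4_irq_prepare() shows up at 247-253 as one-time setup before the IRQ line is requested; the trailing interrupt-clear write is an assumption:

/* Sketch: initialise the wait queue and overflow worker before request_irq(). */
static void vc4_irq_prepare(struct drm_device *dev)
{
        struct vc4_dev *vc4 = to_vc4_dev(dev);

        if (!vc4->v3d)
                return;

        init_waitqueue_head(&vc4->job_wait_queue);
        INIT_WORK(&vc4->overflow_mem_work, vc4_overflow_mem_work);

        /* Clear anything left pending by the firmware (assumed). */
        V3D_WRITE(V3D_INTCTL, V3D_DRIVER_IRQS);
}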
264 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_irq_enable() local
266 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_irq_enable()
269 if (!vc4->v3d) in vc4_irq_enable()
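Only the generation and v3d guards of vc4_irq_enable() are matched (264-269); the actual enable write below is an assumption:

/* Sketch of the enable path. */
void vc4_irq_enable(struct drm_device *dev)
{
        struct vc4_dev *vc4 = to_vc4_dev(dev);

        if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
                return;

        if (!vc4->v3d)
                return;

        /* Enable flush/frame-done IRQs; OUTOMEM is armed once a binner BO
         * exists (register and bits assumed).
         */
        V3D_WRITE(V3D_INTENA, V3D_INT_FLDONE | V3D_INT_FRDONE);
}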
281 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_irq_disable() local
283 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_irq_disable()
286 if (!vc4->v3d) in vc4_irq_disable()
296 synchronize_irq(vc4->irq); in vc4_irq_disable()
298 cancel_work_sync(&vc4->overflow_mem_work); in vc4_irq_disable()
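For vc4_irq_disable() the listing shows the guards plus synchronize_irq() and cancel_work_sync() (281-298); the mask/clear writes in between are assumptions:

/* Sketch of the disable/quiesce path. */
void vc4_irq_disable(struct drm_device *dev)
{
        struct vc4_dev *vc4 = to_vc4_dev(dev);

        if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
                return;

        if (!vc4->v3d)
                return;

        /* Mask and clear all driver IRQs (assumed). */
        V3D_WRITE(V3D_INTDIS, V3D_DRIVER_IRQS);
        V3D_WRITE(V3D_INTCTL, V3D_DRIVER_IRQS);

        /* Let any handler still running on another CPU finish... */
        synchronize_irq(vc4->irq);

        /* ...and make sure the overflow worker is idle, too. */
        cancel_work_sync(&vc4->overflow_mem_work);
}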
303 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_irq_install() local
306 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_irq_install()
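vc4_irq_install() contributes only two hits (303-306), so the sketch below is mostly assumption: prepare, request the line with the vc4_irq handler, then enable.

/* Sketch of the install path; flags and error handling assumed. */
int vc4_irq_install(struct drm_device *dev, int irq)
{
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        int ret;

        if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
                return -ENODEV;

        vc4_irq_prepare(dev);

        ret = request_irq(irq, vc4_irq, 0, dev->driver->name, dev);
        if (ret)
                return ret;

        vc4_irq_enable(dev);
        return 0;
}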
325 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_irq_uninstall() local
327 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_irq_uninstall()
331 free_irq(vc4->irq, dev); in vc4_irq_uninstall()
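The uninstall path (325-331) mirrors install: quiesce, then free the line. Only free_irq() and the guard are confirmed by the hits; the disable call is assumed:

/* Sketch of the uninstall path. */
void vc4_irq_uninstall(struct drm_device *dev)
{
        struct vc4_dev *vc4 = to_vc4_dev(dev);

        if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
                return;

        vc4_irq_disable(dev);   /* assumed */
        free_irq(vc4->irq, dev);
}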
337 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_irq_reset() local
340 if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4)) in vc4_irq_reset()
354 spin_lock_irqsave(&vc4->job_lock, irqflags); in vc4_irq_reset()
357 spin_unlock_irqrestore(&vc4->job_lock, irqflags); in vc4_irq_reset()
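Finally, vc4_irq_reset() (337-357) re-arms the interrupt registers after a GPU reset and, under job_lock, deals with the jobs the reset interrupted. Beyond the matched guard and lock/unlock lines, the register writes and the two calls inside the lock are assumptions:

/* Sketch of the post-GPU-reset path. */
void vc4_irq_reset(struct drm_device *dev)
{
        struct vc4_dev *vc4 = to_vc4_dev(dev);
        unsigned long irqflags;

        if (WARN_ON_ONCE(vc4->gen > VC4_GEN_4))
                return;

        /* Ack anything stale, then re-enable all driver IRQs (assumed). */
        V3D_WRITE(V3D_INTCTL, V3D_DRIVER_IRQS);
        V3D_WRITE(V3D_INTENA, V3D_DRIVER_IRQS);

        /* Requeue the interrupted bin job and retire the killed render job
         * so waiters are not stuck (calls assumed).
         */
        spin_lock_irqsave(&vc4->job_lock, irqflags);
        vc4_cancel_bin_job(dev);
        vc4_irq_finish_render_job(dev);
        spin_unlock_irqrestore(&vc4->job_lock, irqflags);
}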