Lines matching full:gpu (drivers/gpu/drm/msm/adreno/a5xx_preempt.c)
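All of these matches come from the A5xx preemption code in the MSM Adreno driver. Read in order they trace the whole mechanism: a small atomic state machine (set_preempt_state), wptr bookkeeping (update_wptr), priority-ordered ring selection (get_next_ring), a watchdog (a5xx_preempt_timer), the trigger and completion paths (a5xx_preempt_trigger / a5xx_preempt_irq), and setup/teardown (a5xx_preempt_hw_init, preempt_init_ring, a5xx_preempt_fini, a5xx_preempt_init). Because a search listing only shows the matching lines, each group below is followed by a sketch that reconstructs the surrounding function; identifiers that do not appear in the matches (state names, lock fields, size macros) are inferred from the driver's conventions rather than quoted.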
25 static inline void set_preempt_state(struct a5xx_gpu *gpu, in set_preempt_state() argument
34 atomic_set(&gpu->preempt_state, new); in set_preempt_state()
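These two matches are the writer side of a per-GPU state machine: the preemption state lives in an atomic_t so the submit path, the completion IRQ, and the watchdog timer can all act on it without sharing a lock. A sketch of the setter and of the compare-and-swap counterpart the rest of the listing relies on (the counterpart's name and the barrier choice are inferred, not shown in the matches):

    static inline void set_preempt_state(struct a5xx_gpu *gpu,
            enum preempt_state new)
    {
        /* Make prior writes visible before the state changes, and the
         * new state visible before whatever the caller does next. */
        smp_mb__before_atomic();
        atomic_set(&gpu->preempt_state, new);
        smp_mb__after_atomic();
    }

    /* Advance the state machine only if it currently holds `old`;
     * returns false when a concurrent path got there first. */
    static inline bool try_preempt_state(struct a5xx_gpu *a5xx_gpu,
            enum preempt_state old, enum preempt_state new)
    {
        enum preempt_state cur = atomic_cmpxchg(&a5xx_gpu->preempt_state,
                old, new);

        return (cur == old);
    }

Every transition below (trigger, IRQ, timer) funnels through one of these two helpers, which is what keeps the concurrent paths from double-triggering a switch.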
40 static inline void update_wptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in update_wptr() argument
52 gpu_write(gpu, REG_A5XX_CP_RB_WPTR, wptr); in update_wptr()
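The register write above is the visible half of update_wptr. The part the match does not show is that the software wptr has to be sampled under the ring's lock, so a submit racing with the preemption code cannot move it mid-read. A plausible reconstruction (the preempt_lock field name is an inference from the ringbuffer structure):

    static inline void update_wptr(struct msm_gpu *gpu,
            struct msm_ringbuffer *ring)
    {
        unsigned long flags;
        uint32_t wptr;

        if (!ring)
            return;

        /* Snapshot the CPU-side wptr atomically w.r.t. submits... */
        spin_lock_irqsave(&ring->preempt_lock, flags);
        wptr = get_wptr(ring);
        spin_unlock_irqrestore(&ring->preempt_lock, flags);

        /* ...then tell the CP how far it may read */
        gpu_write(gpu, REG_A5XX_CP_RB_WPTR, wptr);
    }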
56 static struct msm_ringbuffer *get_next_ring(struct msm_gpu *gpu) in get_next_ring() argument
58 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in get_next_ring()
63 for (i = 0; i < gpu->nr_rings; i++) { in get_next_ring()
65 struct msm_ringbuffer *ring = gpu->rb[i]; in get_next_ring()
68 empty = (get_wptr(ring) == gpu->funcs->get_rptr(gpu, ring)); in get_next_ring()
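get_next_ring is the scheduling policy in one loop: ring index doubles as priority, lowest index first, and a ring is runnable when its software wptr is ahead of the hardware rptr (the comparison in the match at 68). A sketch of the whole function; the extra fence check on the current ring is how newer revisions of the driver avoid "preempting" onto a ring whose remaining entries have already retired, and should be read as inferred rather than quoted:

    static struct msm_ringbuffer *get_next_ring(struct msm_gpu *gpu)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);
        unsigned long flags;
        int i;

        /* Lowest ring index == highest priority: first busy ring wins */
        for (i = 0; i < gpu->nr_rings; i++) {
            bool empty;
            struct msm_ringbuffer *ring = gpu->rb[i];

            spin_lock_irqsave(&ring->preempt_lock, flags);
            empty = (get_wptr(ring) == gpu->funcs->get_rptr(gpu, ring));
            if (!empty && ring == a5xx_gpu->cur_ring)
                empty = ring->memptrs->fence == ring->fctx->last_fence;
            spin_unlock_irqrestore(&ring->preempt_lock, flags);

            if (!empty)
                return ring;
        }

        return NULL;  /* nothing queued on any ring */
    }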
84 struct msm_gpu *gpu = &a5xx_gpu->base.base; in a5xx_preempt_timer() local
85 struct drm_device *dev = gpu->dev; in a5xx_preempt_timer()
90 DRM_DEV_ERROR(dev->dev, "%s: preemption timed out\n", gpu->name); in a5xx_preempt_timer()
91 kthread_queue_work(gpu->worker, &gpu->recover_work); in a5xx_preempt_timer()
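The timer is the failure path: the trigger arms it when it starts a switch, and the IRQ handler deletes it on success. If it ever fires, the CP never acknowledged the switch, so the handler escalates to the GPU recovery worker, as the two matches show. A sketch of the full callback, assuming a TRIGGERED to FAULTED transition guards against a race with a late IRQ:

    static void a5xx_preempt_timer(struct timer_list *t)
    {
        struct a5xx_gpu *a5xx_gpu = from_timer(a5xx_gpu, t, preempt_timer);
        struct msm_gpu *gpu = &a5xx_gpu->base.base;
        struct drm_device *dev = gpu->dev;

        /* If the state already moved past TRIGGERED, the IRQ won the
         * race and the switch actually completed: nothing to do. */
        if (!try_preempt_state(a5xx_gpu, PREEMPT_TRIGGERED, PREEMPT_FAULTED))
            return;

        DRM_DEV_ERROR(dev->dev, "%s: preemption timed out\n", gpu->name);
        kthread_queue_work(gpu->worker, &gpu->recover_work);
    }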
95 void a5xx_preempt_trigger(struct msm_gpu *gpu) in a5xx_preempt_trigger() argument
97 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_preempt_trigger()
102 if (gpu->nr_rings == 1) in a5xx_preempt_trigger()
120 ring = get_next_ring(gpu); in a5xx_preempt_trigger()
140 update_wptr(gpu, a5xx_gpu->cur_ring); in a5xx_preempt_trigger()
153 gpu_write64(gpu, REG_A5XX_CP_CONTEXT_SWITCH_RESTORE_ADDR_LO, in a5xx_preempt_trigger()
168 gpu_write(gpu, REG_A5XX_CP_CONTEXT_SWITCH_CNTL, 1); in a5xx_preempt_trigger()
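The trigger matches outline the whole submission-side sequence: bail when only one ring exists (102), pick a target (120), refresh the wptr and back out when there is nothing better to run (140), program the incoming record's address (153), and finally poke CP_CONTEXT_SWITCH_CNTL (168). A reconstruction of the function; the state names, the lock, and the watchdog period are inferred:

    void a5xx_preempt_trigger(struct msm_gpu *gpu)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);
        unsigned long flags;
        struct msm_ringbuffer *ring;

        if (gpu->nr_rings == 1)
            return;  /* preemption disabled */

        /* Claim the state machine; failure means a switch is in flight */
        if (!try_preempt_state(a5xx_gpu, PREEMPT_NONE, PREEMPT_START))
            return;

        ring = get_next_ring(gpu);

        /* No ring populated, or the best ring is already current: just
         * push the latest wptr and release the state machine. */
        if (!ring || (a5xx_gpu->cur_ring == ring)) {
            set_preempt_state(a5xx_gpu, PREEMPT_ABORT);
            update_wptr(gpu, a5xx_gpu->cur_ring);
            set_preempt_state(a5xx_gpu, PREEMPT_NONE);
            return;
        }

        /* Freeze the target ring's wptr into its preemption record */
        spin_lock_irqsave(&ring->preempt_lock, flags);
        a5xx_gpu->preempt[ring->id]->wptr = get_wptr(ring);
        spin_unlock_irqrestore(&ring->preempt_lock, flags);

        /* Tell the CP where the incoming context's record lives */
        gpu_write64(gpu, REG_A5XX_CP_CONTEXT_SWITCH_RESTORE_ADDR_LO,
            a5xx_gpu->preempt_iova[ring->id]);

        a5xx_gpu->next_ring = ring;

        /* Watchdog in case the CP never acknowledges */
        mod_timer(&a5xx_gpu->preempt_timer,
            jiffies + msecs_to_jiffies(10000));

        set_preempt_state(a5xx_gpu, PREEMPT_TRIGGERED);

        /* Everything above must land before the doorbell */
        wmb();

        gpu_write(gpu, REG_A5XX_CP_CONTEXT_SWITCH_CNTL, 1);
    }

The abort path matters: a submit can retire between the state claim and the ring scan, so "nothing to switch to" must still release the state machine and push the newest wptr, or the current ring would stall.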
175 void a5xx_preempt_irq(struct msm_gpu *gpu) in a5xx_preempt_irq() argument
178 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_preempt_irq()
180 struct drm_device *dev = gpu->dev; in a5xx_preempt_irq()
194 status = gpu_read(gpu, REG_A5XX_CP_CONTEXT_SWITCH_CNTL); in a5xx_preempt_irq()
198 gpu->name); in a5xx_preempt_irq()
199 kthread_queue_work(gpu->worker, &gpu->recover_work); in a5xx_preempt_irq()
206 update_wptr(gpu, a5xx_gpu->cur_ring); in a5xx_preempt_irq()
214 a5xx_preempt_trigger(gpu); in a5xx_preempt_irq()
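On the completion side, the IRQ handler double-checks the hardware before declaring victory: the CP is supposed to clear CP_CONTEXT_SWITCH_CNTL before raising the interrupt, so a non-zero readback (the gpu_read at 194) means the switch wedged and recovery is queued (199). Otherwise the driver commits the ring switch, refreshes the wptr (206), and immediately re-runs the trigger (214) in case a submit arrived mid-switch. A sketch with the inferred state transitions filled in:

    void a5xx_preempt_irq(struct msm_gpu *gpu)
    {
        uint32_t status;
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);
        struct drm_device *dev = gpu->dev;

        if (!try_preempt_state(a5xx_gpu, PREEMPT_TRIGGERED, PREEMPT_PENDING))
            return;

        /* The watchdog is no longer needed */
        del_timer(&a5xx_gpu->preempt_timer);

        /* A non-zero readback means the CP never finished the switch */
        status = gpu_read(gpu, REG_A5XX_CP_CONTEXT_SWITCH_CNTL);
        if (unlikely(status)) {
            set_preempt_state(a5xx_gpu, PREEMPT_FAULTED);
            DRM_DEV_ERROR(dev->dev, "%s: Preemption failed to complete\n",
                gpu->name);
            kthread_queue_work(gpu->worker, &gpu->recover_work);
            return;
        }

        a5xx_gpu->cur_ring = a5xx_gpu->next_ring;
        a5xx_gpu->next_ring = NULL;

        update_wptr(gpu, a5xx_gpu->cur_ring);

        set_preempt_state(a5xx_gpu, PREEMPT_NONE);

        /* A submit may have landed during the switch; re-evaluate */
        a5xx_preempt_trigger(gpu);
    }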
217 void a5xx_preempt_hw_init(struct msm_gpu *gpu) in a5xx_preempt_hw_init() argument
219 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_preempt_hw_init()
224 a5xx_gpu->cur_ring = gpu->rb[0]; in a5xx_preempt_hw_init()
227 if (gpu->nr_rings == 1) in a5xx_preempt_hw_init()
230 for (i = 0; i < gpu->nr_rings; i++) { in a5xx_preempt_hw_init()
235 a5xx_gpu->preempt[i]->rbase = gpu->rb[i]->iova; in a5xx_preempt_hw_init()
236 a5xx_gpu->preempt[i]->rptr_addr = shadowptr(a5xx_gpu, gpu->rb[i]); in a5xx_preempt_hw_init()
240 gpu_write64(gpu, REG_A5XX_CP_CONTEXT_SWITCH_SMMU_INFO_LO, 0); in a5xx_preempt_hw_init()
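Per-boot hardware setup is mostly bookkeeping: the GPU always comes up on ring 0 (match at 224), preemption is skipped entirely with a single ring (227), each ring's preemption record is pointed at its ringbuffer iova and rptr shadow (235-236), and a zero in the SMMU info block (240) tells the CP no pagetable switch is involved. A sketch; the zeroed wptr/rptr fields and the final state reset are inferred from the record's structure:

    void a5xx_preempt_hw_init(struct msm_gpu *gpu)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);
        int i;

        /* The GPU always boots on ring 0 */
        a5xx_gpu->cur_ring = gpu->rb[0];

        /* No preemption if we only have one ring */
        if (gpu->nr_rings == 1)
            return;

        for (i = 0; i < gpu->nr_rings; i++) {
            a5xx_gpu->preempt[i]->wptr = 0;
            a5xx_gpu->preempt[i]->rptr = 0;
            a5xx_gpu->preempt[i]->rbase = gpu->rb[i]->iova;
            a5xx_gpu->preempt[i]->rptr_addr = shadowptr(a5xx_gpu, gpu->rb[i]);
        }

        /* Zero signals "no pagetable switch" to the CP */
        gpu_write64(gpu, REG_A5XX_CP_CONTEXT_SWITCH_SMMU_INFO_LO, 0);

        set_preempt_state(a5xx_gpu, PREEMPT_NONE);
    }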
250 struct msm_gpu *gpu = &adreno_gpu->base; in preempt_init_ring() local
256 ptr = msm_gem_kernel_new(gpu->dev, in preempt_init_ring()
258 MSM_BO_WC | MSM_BO_MAP_PRIV, gpu->vm, &bo, &iova); in preempt_init_ring()
264 counters = msm_gem_kernel_new(gpu->dev, in preempt_init_ring()
266 MSM_BO_WC, gpu->vm, &counters_bo, &counters_iova); in preempt_init_ring()
268 msm_gem_kernel_put(bo, gpu->vm); in preempt_init_ring()
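preempt_init_ring allocates the two per-ring buffers the CP needs: a privileged, write-combined preemption record (MSM_BO_MAP_PRIV, matches at 256-258) and an unprivileged counters buffer (264-266); the msm_gem_kernel_put at 268 is the unwind when the second allocation fails. A sketch with plausible error handling; the size macros and the bookkeeping fields come from the driver's headers rather than the matches:

    static int preempt_init_ring(struct a5xx_gpu *a5xx_gpu,
            struct msm_ringbuffer *ring)
    {
        struct adreno_gpu *adreno_gpu = &a5xx_gpu->base;
        struct msm_gpu *gpu = &adreno_gpu->base;
        struct a5xx_preempt_record *ptr;
        void *counters;
        struct drm_gem_object *bo = NULL, *counters_bo = NULL;
        u64 iova = 0, counters_iova = 0;

        /* Privileged record the CP saves/restores context into */
        ptr = msm_gem_kernel_new(gpu->dev,
            A5XX_PREEMPT_RECORD_SIZE + A5XX_PREEMPT_COUNTER_SIZE,
            MSM_BO_WC | MSM_BO_MAP_PRIV, gpu->vm, &bo, &iova);
        if (IS_ERR(ptr))
            return PTR_ERR(ptr);

        /* The counters buffer must stay unprivileged */
        counters = msm_gem_kernel_new(gpu->dev,
            A5XX_PREEMPT_COUNTER_SIZE,
            MSM_BO_WC, gpu->vm, &counters_bo, &counters_iova);
        if (IS_ERR(counters)) {
            /* Unwind the first allocation on failure */
            msm_gem_kernel_put(bo, gpu->vm);
            return PTR_ERR(counters);
        }

        a5xx_gpu->preempt_bo[ring->id] = bo;
        a5xx_gpu->preempt_counters_bo[ring->id] = counters_bo;
        a5xx_gpu->preempt_iova[ring->id] = iova;
        a5xx_gpu->preempt[ring->id] = ptr;

        return 0;
    }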
292 void a5xx_preempt_fini(struct msm_gpu *gpu) in a5xx_preempt_fini() argument
294 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_preempt_fini()
298 for (i = 0; i < gpu->nr_rings; i++) { in a5xx_preempt_fini()
299 msm_gem_kernel_put(a5xx_gpu->preempt_bo[i], gpu->vm); in a5xx_preempt_fini()
300 msm_gem_kernel_put(a5xx_gpu->preempt_counters_bo[i], gpu->vm); in a5xx_preempt_fini()
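Teardown mirrors preempt_init_ring: both per-ring buffers are released back through the GPU's VM, exactly as the two matches show. For completeness, the surrounding function is hardly more than that loop:

    void a5xx_preempt_fini(struct msm_gpu *gpu)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);
        int i;

        for (i = 0; i < gpu->nr_rings; i++) {
            msm_gem_kernel_put(a5xx_gpu->preempt_bo[i], gpu->vm);
            msm_gem_kernel_put(a5xx_gpu->preempt_counters_bo[i], gpu->vm);
        }
    }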
304 void a5xx_preempt_init(struct msm_gpu *gpu) in a5xx_preempt_init() argument
306 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_preempt_init()
311 if (gpu->nr_rings <= 1) in a5xx_preempt_init()
314 for (i = 0; i < gpu->nr_rings; i++) { in a5xx_preempt_init()
315 if (preempt_init_ring(a5xx_gpu, gpu->rb[i])) { in a5xx_preempt_init()
320 a5xx_preempt_fini(gpu); in a5xx_preempt_init()
321 gpu->nr_rings = 1; in a5xx_preempt_init()