Lines matching refs: ring (drivers/gpu/drm/radeon/radeon_fence.c). Each entry gives the source line number, the matched line, and the enclosing function; a trailing "argument" or "local" marks lines where 'ring' is declared as a function parameter or as a local variable.

67 static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring)  in radeon_fence_write()  argument
69 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_write()
88 static u32 radeon_fence_read(struct radeon_device *rdev, int ring) in radeon_fence_read() argument
90 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_read()
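
radeon_fence_write() and radeon_fence_read() both begin by resolving the per-ring driver state, &rdev->fence_drv[ring], before touching the fence value. A minimal userspace model of that lookup-plus-access pattern (the writeback-memory vs. scratch-register split is an assumption about the rest of the driver, not something visible in the matched lines):

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_RINGS 8

    /* toy stand-in for struct radeon_fence_driver */
    struct fence_drv {
        volatile uint32_t *cpu_addr;   /* CPU view of the fence slot (writeback) */
        uint32_t scratch_fallback;     /* stands in for the scratch-register path */
    };

    struct device_model {
        struct fence_drv fence_drv[NUM_RINGS];
        uint32_t wb_page[NUM_RINGS];   /* fake writeback page, one slot per ring */
        int wb_enabled;
    };

    static void fence_write(struct device_model *dev, uint32_t seq, int ring)
    {
        struct fence_drv *drv = &dev->fence_drv[ring];   /* same per-ring lookup */

        if (dev->wb_enabled && drv->cpu_addr)
            *drv->cpu_addr = seq;           /* memory-backed path */
        else
            drv->scratch_fallback = seq;    /* register-backed path (modelled) */
    }

    static uint32_t fence_read(struct device_model *dev, int ring)
    {
        struct fence_drv *drv = &dev->fence_drv[ring];

        if (dev->wb_enabled && drv->cpu_addr)
            return *drv->cpu_addr;
        return drv->scratch_fallback;
    }

    int main(void)
    {
        static struct device_model dev = { .wb_enabled = 1 };

        for (int r = 0; r < NUM_RINGS; r++)
            dev.fence_drv[r].cpu_addr = &dev.wb_page[r];

        fence_write(&dev, 42, 3);
        printf("ring 3 fence = %u\n", fence_read(&dev, 3));   /* prints 42 */
        return 0;
    }
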
112 static void radeon_fence_schedule_check(struct radeon_device *rdev, int ring) in radeon_fence_schedule_check() argument
119 &rdev->fence_drv[ring].lockup_work, in radeon_fence_schedule_check()
135 int ring) in radeon_fence_emit() argument
145 (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_emit()
146 (*fence)->ring = ring; in radeon_fence_emit()
150 rdev->fence_context + ring, in radeon_fence_emit()
152 radeon_fence_ring_emit(rdev, ring, *fence); in radeon_fence_emit()
153 trace_radeon_fence_emit(rdev_to_drm(rdev), ring, (*fence)->seq); in radeon_fence_emit()
154 radeon_fence_schedule_check(rdev, ring); in radeon_fence_emit()
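
The matches for radeon_fence_emit() show the ordering: take the next per-ring sequence number from sync_seq[ring], tag the fence with its ring, emit the fence command onto that ring, then arm the lockup check. A compact sketch of that flow, using invented toy types rather than the driver's structures:

    #include <stdint.h>
    #include <stdlib.h>

    #define NUM_RINGS 8

    struct toy_fence {
        uint64_t seq;   /* value the GPU will write when it passes this point */
        int ring;       /* which ring it was emitted on */
    };

    static uint64_t sync_seq[NUM_RINGS];   /* last sequence number handed out, per ring */

    static void ring_emit_fence(int ring, uint64_t seq) { (void)ring; (void)seq; /* would write packets */ }
    static void schedule_lockup_check(int ring)         { (void)ring;            /* would queue delayed work */ }

    static struct toy_fence *fence_emit(int ring)
    {
        struct toy_fence *fence = malloc(sizeof(*fence));

        if (!fence)
            return NULL;
        fence->seq = ++sync_seq[ring];        /* next value this ring will signal */
        fence->ring = ring;
        ring_emit_fence(ring, fence->seq);    /* command that stores seq when executed */
        schedule_lockup_check(ring);          /* notice if it never lands */
        return fence;
    }

    int main(void)
    {
        free(fence_emit(2));
        return 0;
    }
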
177 seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq); in radeon_fence_check_signaled()
180 radeon_irq_kms_sw_irq_put(fence->rdev, fence->ring); in radeon_fence_check_signaled()
197 static bool radeon_fence_activity(struct radeon_device *rdev, int ring) in radeon_fence_activity() argument
224 last_seq = atomic64_read(&rdev->fence_drv[ring].last_seq); in radeon_fence_activity()
226 last_emitted = rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_activity()
227 seq = radeon_fence_read(rdev, ring); in radeon_fence_activity()
251 } while (atomic64_xchg(&rdev->fence_drv[ring].last_seq, seq) > seq); in radeon_fence_activity()
254 radeon_fence_schedule_check(rdev, ring); in radeon_fence_activity()
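
radeon_fence_activity() reconciles the 32-bit value the hardware writes with the driver's 64-bit bookkeeping: last_seq supplies the upper half, a wrap is detected when the combined value goes backwards, and readings outside (last_seq, last_emitted] are discarded. A self-contained sketch of that arithmetic (the function name extend_seq() is invented for the example):

    #include <stdint.h>
    #include <stdio.h>

    /*
     * The hardware fence location only holds the low 32 bits of the sequence
     * number.  Extend a fresh 32-bit reading to 64 bits using the last known
     * 64-bit value, and reject nonsense readings using the last emitted value.
     */
    static uint64_t extend_seq(uint32_t hw, uint64_t last_seq, uint64_t last_emitted)
    {
        uint64_t seq = (uint64_t)hw | (last_seq & 0xffffffff00000000ULL);

        if (seq < last_seq) {
            /* low half wrapped around: borrow the upper half from last_emitted */
            seq &= 0xffffffffULL;
            seq |= last_emitted & 0xffffffff00000000ULL;
        }
        if (seq <= last_seq || seq > last_emitted)
            return last_seq;        /* stale or garbage reading: no progress */
        return seq;
    }

    int main(void)
    {
        /* last completed 0x1fffffffe, last emitted 0x200000005, hw now reads 2 */
        printf("0x%llx\n",
               (unsigned long long)extend_seq(0x2, 0x1fffffffeULL, 0x200000005ULL));
        return 0;
    }

In the driver the result is published with the atomic64_xchg() in the do/while loop shown above, so a racing update never moves last_seq backwards.
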
271 int ring; in radeon_fence_check_lockup() local
276 ring = fence_drv - &rdev->fence_drv[0]; in radeon_fence_check_lockup()
280 radeon_fence_schedule_check(rdev, ring); in radeon_fence_check_lockup()
293 if (radeon_fence_activity(rdev, ring)) in radeon_fence_check_lockup()
296 else if (radeon_ring_is_lockup(rdev, ring, &rdev->ring[ring])) { in radeon_fence_check_lockup()
301 fence_drv->sync_seq[ring], ring); in radeon_fence_check_lockup()
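
In radeon_fence_check_lockup() the ring index is not passed in; it is recovered from the per-ring structure's position in the fence_drv[] array (ring = fence_drv - &rdev->fence_drv[0]). A standalone illustration of that idiom, combining container_of() with pointer subtraction (the toy types here are stand-ins):

    #include <stddef.h>
    #include <stdio.h>

    #define NUM_RINGS 8

    struct device_model;                    /* forward declaration for the back-pointer */

    struct work { int pending; };           /* toy stand-in for a delayed work item */

    struct fence_drv {
        struct work lockup_work;
        struct device_model *dev;           /* back-pointer, like the .rdev field set at init */
    };

    struct device_model {
        struct fence_drv fence_drv[NUM_RINGS];
    };

    /* container_of in the usual kernel form, reproduced so this file stands alone */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    static void lockup_work_handler(struct work *w)
    {
        /* from the embedded work item back to its per-ring structure ... */
        struct fence_drv *drv = container_of(w, struct fence_drv, lockup_work);
        /* ... and from the array element back to the ring index */
        int ring = (int)(drv - &drv->dev->fence_drv[0]);

        printf("lockup check runs for ring %d\n", ring);
    }

    int main(void)
    {
        static struct device_model dev;

        for (int r = 0; r < NUM_RINGS; r++)
            dev.fence_drv[r].dev = &dev;

        lockup_work_handler(&dev.fence_drv[5].lockup_work);   /* prints ring 5 */
        return 0;
    }
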
319 void radeon_fence_process(struct radeon_device *rdev, int ring) in radeon_fence_process() argument
321 if (radeon_fence_activity(rdev, ring)) in radeon_fence_process()
340 u64 seq, unsigned int ring) in radeon_fence_seq_signaled() argument
342 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) in radeon_fence_seq_signaled()
346 radeon_fence_process(rdev, ring); in radeon_fence_seq_signaled()
347 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) in radeon_fence_seq_signaled()
357 unsigned int ring = fence->ring; in radeon_fence_is_signaled() local
360 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) in radeon_fence_is_signaled()
364 radeon_fence_process(rdev, ring); in radeon_fence_is_signaled()
367 if (atomic64_read(&rdev->fence_drv[ring].last_seq) >= seq) in radeon_fence_is_signaled()
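
radeon_fence_seq_signaled() and radeon_fence_is_signaled() share one pattern: check the cached last_seq first, and only if that fails, poll the hardware via radeon_fence_process() and check again. A small model of that double check:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define NUM_RINGS 8

    /* last 64-bit sequence number known to have completed, per ring */
    static _Atomic uint64_t last_seq[NUM_RINGS];

    /* stand-in for radeon_fence_process(): would re-read the hardware value
     * and advance last_seq[ring]; a no-op here */
    static void fence_process(int ring) { (void)ring; }

    static bool seq_signaled(int ring, uint64_t seq)
    {
        if (atomic_load(&last_seq[ring]) >= seq)   /* cheap cached check first */
            return true;
        fence_process(ring);                       /* poll the hardware once */
        return atomic_load(&last_seq[ring]) >= seq;
    }

    int main(void)
    {
        atomic_store(&last_seq[0], 10);
        return seq_signaled(0, 7) ? 0 : 1;         /* exits 0: already signaled */
    }
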
386 if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) in radeon_fence_enable_signaling()
390 radeon_irq_kms_sw_irq_get(rdev, fence->ring); in radeon_fence_enable_signaling()
392 if (radeon_fence_activity(rdev, fence->ring)) in radeon_fence_enable_signaling()
396 if (atomic64_read(&rdev->fence_drv[fence->ring].last_seq) >= fence->seq) { in radeon_fence_enable_signaling()
397 radeon_irq_kms_sw_irq_put(rdev, fence->ring); in radeon_fence_enable_signaling()
405 if (radeon_irq_kms_sw_irq_get_delayed(rdev, fence->ring)) in radeon_fence_enable_signaling()
406 rdev->fence_drv[fence->ring].delayed_irq = true; in radeon_fence_enable_signaling()
407 radeon_fence_schedule_check(rdev, fence->ring); in radeon_fence_enable_signaling()
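
radeon_fence_enable_signaling() follows the dma_fence contract: return false if the fence has already signaled, otherwise take a reference on the fence interrupt and promise that something will signal it later. A simplified model of the shape visible in the matches; the real function additionally falls back to the periodic lockup check when the interrupt cannot be enabled (the delayed_irq path above):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define NUM_RINGS 8

    static _Atomic uint64_t last_seq[NUM_RINGS];    /* last completed seq, per ring */
    static _Atomic int sw_irq_refs[NUM_RINGS];      /* waiters wanting the fence IRQ enabled */

    static void sw_irq_get(int ring)     { atomic_fetch_add(&sw_irq_refs[ring], 1); }
    static void sw_irq_put(int ring)     { atomic_fetch_sub(&sw_irq_refs[ring], 1); }
    static void check_activity(int ring) { (void)ring; /* would re-read the hw counter */ }

    /* dma_fence-style contract: false = already signaled, true = will signal later */
    static bool enable_signaling(int ring, uint64_t seq)
    {
        if (atomic_load(&last_seq[ring]) >= seq)
            return false;

        sw_irq_get(ring);           /* keep the fence interrupt on for this waiter */
        check_activity(ring);       /* it may have signaled while we raced */

        if (atomic_load(&last_seq[ring]) >= seq) {
            sw_irq_put(ring);       /* signaled after all: drop our reference */
            return false;
        }
        return true;                /* IRQ handler / lockup worker will signal it */
    }

    int main(void)
    {
        return enable_signaling(0, 1) ? 0 : 1;   /* nothing completed yet: returns true */
    }
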
431 if (radeon_fence_seq_signaled(fence->rdev, fence->seq, fence->ring)) { in radeon_fence_signaled()
547 seq[fence->ring] = fence->seq; in radeon_fence_wait_timeout()
629 int radeon_fence_wait_next(struct radeon_device *rdev, int ring) in radeon_fence_wait_next() argument
634 seq[ring] = atomic64_read(&rdev->fence_drv[ring].last_seq) + 1ULL; in radeon_fence_wait_next()
635 if (seq[ring] >= rdev->fence_drv[ring].sync_seq[ring]) { in radeon_fence_wait_next()
659 int radeon_fence_wait_empty(struct radeon_device *rdev, int ring) in radeon_fence_wait_empty() argument
664 seq[ring] = rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_wait_empty()
665 if (!seq[ring]) in radeon_fence_wait_empty()
674 ring, r); in radeon_fence_wait_empty()
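
radeon_fence_wait_next() and radeon_fence_wait_empty() both build a per-ring array of target sequence numbers, where 0 means "ignore this ring", and hand it to a common wait helper. A sketch of how those targets are chosen (the wait helper itself is stubbed out, and the -ENOENT return mirrors the driver's "nothing pending" case):

    #include <errno.h>
    #include <stdint.h>

    #define NUM_RINGS 8

    static uint64_t last_seq[NUM_RINGS];   /* last value seen as completed, per ring */
    static uint64_t sync_seq[NUM_RINGS];   /* last value emitted, per ring */

    /* stand-in for the shared waiter: blocks until, on some ring r with
     * target[r] != 0, last_seq[r] reaches target[r]; 0 means "ignore this ring" */
    static int wait_for_targets(const uint64_t *target) { (void)target; return 0; }

    /* wait for the next fence on one ring */
    static int wait_next(int ring)
    {
        uint64_t target[NUM_RINGS] = { 0 };

        target[ring] = last_seq[ring] + 1;
        if (target[ring] >= sync_seq[ring])
            return -ENOENT;                 /* nothing left to wait for on this ring */
        return wait_for_targets(target);
    }

    /* wait until everything already emitted on one ring has completed */
    static int wait_empty(int ring)
    {
        uint64_t target[NUM_RINGS] = { 0 };

        target[ring] = sync_seq[ring];
        if (!target[ring])
            return 0;                       /* ring never emitted a fence */
        return wait_for_targets(target);
    }

    int main(void)
    {
        sync_seq[0] = 3;
        last_seq[0] = 1;
        return wait_next(0) | wait_empty(0);
    }
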
719 unsigned int radeon_fence_count_emitted(struct radeon_device *rdev, int ring) in radeon_fence_count_emitted() argument
726 radeon_fence_process(rdev, ring); in radeon_fence_count_emitted()
727 emitted = rdev->fence_drv[ring].sync_seq[ring] in radeon_fence_count_emitted()
728 - atomic64_read(&rdev->fence_drv[ring].last_seq); in radeon_fence_count_emitted()
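
radeon_fence_count_emitted() is simple arithmetic after a refresh: fences still in flight = last emitted minus last completed. A tiny model (the driver clamps the 64-bit difference before returning it as an unsigned int; the clamp value below is illustrative):

    #include <stdint.h>

    static unsigned int count_emitted(uint64_t sync_seq, uint64_t last_seq)
    {
        uint64_t emitted = sync_seq - last_seq;   /* emitted but not yet completed */

        if (emitted > 0x10000000)
            emitted = 0x10000000;                 /* keep the result in unsigned range */
        return (unsigned int)emitted;
    }

    int main(void)
    {
        return count_emitted(105, 100) == 5 ? 0 : 1;
    }
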
754 if (fence->ring == dst_ring) in radeon_fence_need_sync()
759 if (fence->seq <= fdrv->sync_seq[fence->ring]) in radeon_fence_need_sync()
782 if (fence->ring == dst_ring) in radeon_fence_note_sync()
786 src = &fence->rdev->fence_drv[fence->ring]; in radeon_fence_note_sync()
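
radeon_fence_need_sync() and radeon_fence_note_sync() maintain, per ring, the highest fence value of every other ring that this ring has already synchronized against. A simplified model of that bookkeeping (the 2-D sync_seq table stands in for the per-driver sync_seq[] arrays; the max-merge in note_sync is an assumption about the surrounding code, not shown in the matched lines):

    #include <stdbool.h>
    #include <stdint.h>

    #define NUM_RINGS 8

    struct toy_fence { uint64_t seq; int ring; };

    /* sync_seq[dst][src]: highest fence value of ring src that ring dst has
     * already been made to wait for; sync_seq[r][r] doubles as ring r's own
     * emit counter, as in the matches for radeon_fence_emit() above */
    static uint64_t sync_seq[NUM_RINGS][NUM_RINGS];

    /* does dst_ring still need an explicit sync against this fence? */
    static bool need_sync(const struct toy_fence *fence, int dst_ring)
    {
        if (fence->ring == dst_ring)
            return false;                   /* same ring: ordering is implicit */
        if (fence->seq <= sync_seq[dst_ring][fence->ring])
            return false;                   /* already synced past this point */
        return true;
    }

    /* record that dst_ring has now waited for this fence; it thereby inherits
     * everything the fence's ring had itself synced to (element-wise maximum) */
    static void note_sync(const struct toy_fence *fence, int dst_ring)
    {
        if (fence->ring == dst_ring)
            return;
        for (int i = 0; i < NUM_RINGS; i++) {
            if (i == dst_ring)
                continue;
            if (sync_seq[fence->ring][i] > sync_seq[dst_ring][i])
                sync_seq[dst_ring][i] = sync_seq[fence->ring][i];
        }
    }

    int main(void)
    {
        struct toy_fence f = { .seq = 7, .ring = 1 };

        sync_seq[1][1] = 9;                 /* ring 1 has emitted up to 9 */
        if (!need_sync(&f, 0))
            return 1;
        note_sync(&f, 0);                   /* ring 0 now knows about seq 9 on ring 1 */
        return need_sync(&f, 0) ? 1 : 0;    /* no longer needed */
    }
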
808 int radeon_fence_driver_start_ring(struct radeon_device *rdev, int ring) in radeon_fence_driver_start_ring() argument
813 radeon_scratch_free(rdev, rdev->fence_drv[ring].scratch_reg); in radeon_fence_driver_start_ring()
814 if (rdev->wb.use_event || !radeon_ring_supports_scratch_reg(rdev, &rdev->ring[ring])) { in radeon_fence_driver_start_ring()
815 rdev->fence_drv[ring].scratch_reg = 0; in radeon_fence_driver_start_ring()
816 if (ring != R600_RING_TYPE_UVD_INDEX) { in radeon_fence_driver_start_ring()
817 index = R600_WB_EVENT_OFFSET + ring * 4; in radeon_fence_driver_start_ring()
818 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; in radeon_fence_driver_start_ring()
819 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + in radeon_fence_driver_start_ring()
825 rdev->fence_drv[ring].cpu_addr = rdev->uvd.cpu_addr + index; in radeon_fence_driver_start_ring()
826 rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index; in radeon_fence_driver_start_ring()
830 r = radeon_scratch_get(rdev, &rdev->fence_drv[ring].scratch_reg); in radeon_fence_driver_start_ring()
836 rdev->fence_drv[ring].scratch_reg - in radeon_fence_driver_start_ring()
838 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; in radeon_fence_driver_start_ring()
839 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index; in radeon_fence_driver_start_ring()
841 radeon_fence_write(rdev, atomic64_read(&rdev->fence_drv[ring].last_seq), ring); in radeon_fence_driver_start_ring()
842 rdev->fence_drv[ring].initialized = true; in radeon_fence_driver_start_ring()
844 ring, rdev->fence_drv[ring].gpu_addr); in radeon_fence_driver_start_ring()
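
radeon_fence_driver_start_ring() points each ring's fence at a 4-byte slot and records the same location twice: cpu_addr for the CPU to read and gpu_addr for the GPU to write. The writeback path computes index = R600_WB_EVENT_OFFSET + ring * 4. A standalone sketch of that address math (the offset value and base address below are illustrative, not taken from the matches):

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_RINGS        8
    #define WB_EVENT_OFFSET  3072   /* byte offset of the fence slots inside the
                                     * writeback page; illustrative value */

    int main(void)
    {
        uint32_t wb_page[1024];            /* CPU mapping of the writeback page   */
        uint64_t wb_gpu_addr = 0x100000;   /* GPU address of the same page (fake) */

        for (int ring = 0; ring < NUM_RINGS; ring++) {
            /* one 4-byte fence slot per ring, as in the matches above */
            unsigned int index = WB_EVENT_OFFSET + ring * 4;
            volatile uint32_t *cpu_addr = &wb_page[index / 4];  /* CPU view */
            uint64_t gpu_addr = wb_gpu_addr + index;            /* GPU view */

            printf("ring %d: cpu slot %u, gpu addr 0x%llx\n",
                   ring, index / 4, (unsigned long long)gpu_addr);
            (void)cpu_addr;
        }
        return 0;
    }
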
858 static void radeon_fence_driver_init_ring(struct radeon_device *rdev, int ring) in radeon_fence_driver_init_ring() argument
862 rdev->fence_drv[ring].scratch_reg = -1; in radeon_fence_driver_init_ring()
863 rdev->fence_drv[ring].cpu_addr = NULL; in radeon_fence_driver_init_ring()
864 rdev->fence_drv[ring].gpu_addr = 0; in radeon_fence_driver_init_ring()
866 rdev->fence_drv[ring].sync_seq[i] = 0; in radeon_fence_driver_init_ring()
867 atomic64_set(&rdev->fence_drv[ring].last_seq, 0); in radeon_fence_driver_init_ring()
868 rdev->fence_drv[ring].initialized = false; in radeon_fence_driver_init_ring()
869 INIT_DELAYED_WORK(&rdev->fence_drv[ring].lockup_work, in radeon_fence_driver_init_ring()
871 rdev->fence_drv[ring].rdev = rdev; in radeon_fence_driver_init_ring()
887 int ring; in radeon_fence_driver_init() local
890 for (ring = 0; ring < RADEON_NUM_RINGS; ring++) in radeon_fence_driver_init()
891 radeon_fence_driver_init_ring(rdev, ring); in radeon_fence_driver_init()
906 int ring, r; in radeon_fence_driver_fini() local
909 for (ring = 0; ring < RADEON_NUM_RINGS; ring++) { in radeon_fence_driver_fini()
910 if (!rdev->fence_drv[ring].initialized) in radeon_fence_driver_fini()
912 r = radeon_fence_wait_empty(rdev, ring); in radeon_fence_driver_fini()
915 radeon_fence_driver_force_completion(rdev, ring); in radeon_fence_driver_fini()
917 cancel_delayed_work_sync(&rdev->fence_drv[ring].lockup_work); in radeon_fence_driver_fini()
919 radeon_scratch_free(rdev, rdev->fence_drv[ring].scratch_reg); in radeon_fence_driver_fini()
920 rdev->fence_drv[ring].initialized = false; in radeon_fence_driver_fini()
934 void radeon_fence_driver_force_completion(struct radeon_device *rdev, int ring) in radeon_fence_driver_force_completion() argument
936 if (rdev->fence_drv[ring].initialized) { in radeon_fence_driver_force_completion()
937 radeon_fence_write(rdev, rdev->fence_drv[ring].sync_seq[ring], ring); in radeon_fence_driver_force_completion()
938 cancel_delayed_work_sync(&rdev->fence_drv[ring].lockup_work); in radeon_fence_driver_force_completion()
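
radeon_fence_driver_force_completion() writes the last emitted sequence number straight into the fence location, so every outstanding fence reads as signaled, and cancels the pending lockup check. A toy version:

    #include <stdint.h>

    #define NUM_RINGS 8

    static uint64_t sync_seq[NUM_RINGS];       /* last emitted, per ring */
    static uint32_t fence_value[NUM_RINGS];    /* what the "hardware" reports */

    static void fence_write(int ring, uint32_t seq) { fence_value[ring] = seq; }
    static void cancel_lockup_check(int ring)       { (void)ring; }

    /* used on teardown / reset paths: make every emitted fence look done */
    static void force_completion(int ring)
    {
        fence_write(ring, (uint32_t)sync_seq[ring]);
        cancel_lockup_check(ring);
    }

    int main(void)
    {
        sync_seq[2] = 1234;
        force_completion(2);
        return fence_value[2] == 1234 ? 0 : 1;
    }
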
1018 switch (fence->ring) { in radeon_fence_get_timeline_name()