/linux/drivers/gpu/drm/radeon/ |
radeon_fence.c
  145  (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring];  in radeon_fence_emit()
  226  last_emitted = rdev->fence_drv[ring].sync_seq[ring];  in radeon_fence_activity()
  301  fence_drv->sync_seq[ring], ring);  in radeon_fence_check_lockup()
  635  if (seq[ring] >= rdev->fence_drv[ring].sync_seq[ring]) {  in radeon_fence_wait_next()
  664  seq[ring] = rdev->fence_drv[ring].sync_seq[ring];  in radeon_fence_wait_empty()
  727  emitted = rdev->fence_drv[ring].sync_seq[ring]  in radeon_fence_count_emitted()
  759  if (fence->seq <= fdrv->sync_seq[fence->ring])  in radeon_fence_need_sync()
  792  dst->sync_seq[i] = max(dst->sync_seq[i], src->sync_seq[i]);  in radeon_fence_note_sync()
  866  rdev->fence_drv[ring].sync_seq[i] = 0;  in radeon_fence_driver_init_ring()
  937  radeon_fence_write(rdev, rdev->fence_drv[ring].sync_seq[ring], ring);  in radeon_fence_driver_force_completion()
  [all …]
|
radeon.h
  368  uint64_t sync_seq[RADEON_NUM_RINGS];  member
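Taken together, the radeon hits trace one mechanism: radeon.h:368 declares a per-ring array of last-seen sequence numbers, radeon_fence_emit() bumps the ring's own entry, radeon_fence_need_sync() checks whether a destination ring has already synced past a fence, and radeon_fence_note_sync() merges the source ring's view in. Below is a minimal user-space sketch of that bookkeeping; NUM_RINGS, struct fence_driver and the helper names are stand-ins for RADEON_NUM_RINGS, struct radeon_fence_driver and the radeon_fence_* functions, not the kernel code itself.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define NUM_RINGS 8     /* stands in for RADEON_NUM_RINGS */

struct fence_driver {
        /* sync_seq[i]: last sequence from ring i this ring has synced
         * against; sync_seq[own ring] doubles as the emit counter. */
        uint64_t sync_seq[NUM_RINGS];
};

static struct fence_driver fence_drv[NUM_RINGS];

/* radeon_fence_emit(): the ring's own slot is its emit counter. */
static uint64_t fence_emit(int ring)
{
        return ++fence_drv[ring].sync_seq[ring];
}

/* radeon_fence_need_sync(): no semaphore needed if the destination
 * ring already waited past this fence (the test at line 759). */
static bool fence_need_sync(int fence_ring, uint64_t seq, int dst_ring)
{
        return seq > fence_drv[dst_ring].sync_seq[fence_ring];
}

/* radeon_fence_note_sync(): fold the source ring's view into the
 * destination, mirroring the max() at line 792. */
static void fence_note_sync(int src_ring, int dst_ring)
{
        struct fence_driver *src = &fence_drv[src_ring];
        struct fence_driver *dst = &fence_drv[dst_ring];

        for (int i = 0; i < NUM_RINGS; ++i)
                if (src->sync_seq[i] > dst->sync_seq[i])
                        dst->sync_seq[i] = src->sync_seq[i];
}

int main(void)
{
        uint64_t seq = fence_emit(0);

        printf("need sync: %d\n", fence_need_sync(0, seq, 1));  /* 1 */
        fence_note_sync(0, 1);
        printf("need sync: %d\n", fence_need_sync(0, seq, 1));  /* 0 */
        return 0;
}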
|
/linux/drivers/gpu/drm/amd/amdgpu/ |
amdgpu_fence.c
  161  seq = ++ring->fence_drv.sync_seq;  in amdgpu_fence_emit()
  232  seq = ++ring->fence_drv.sync_seq;  in amdgpu_fence_emit_polling()
  284  seq != ring->fence_drv.sync_seq)  in amdgpu_fence_process()
  342  uint64_t seq = READ_ONCE(ring->fence_drv.sync_seq);  in amdgpu_fence_wait_empty()
  402  emitted += READ_ONCE(ring->fence_drv.sync_seq);  in amdgpu_fence_count_emitted()
  417  uint32_t last_seq, sync_seq;  in amdgpu_fence_last_unsignaled_time_us() local
  420  sync_seq = READ_ONCE(ring->fence_drv.sync_seq);  in amdgpu_fence_last_unsignaled_time_us()
  421  if (last_seq == sync_seq)  in amdgpu_fence_last_unsignaled_time_us()
  518  ring->fence_drv.sync_seq = 0;  in amdgpu_fence_driver_init_ring()
  763  amdgpu_fence_write(ring, ring->fence_drv.sync_seq);  in amdgpu_fence_driver_force_completion()
  [all …]
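amdgpu collapses radeon's array into a single per-ring counter: sync_seq is the last fence emitted, a separately tracked last_seq is the last fence the hardware signaled, and their difference is the number of outstanding fences (amdgpu_fence_count_emitted, line 402). A hedged sketch of that pair, using C11 atomics in place of the driver's READ_ONCE() and interrupt-driven amdgpu_fence_process():

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

struct ring {
        _Atomic uint32_t sync_seq;      /* last fence emitted */
        _Atomic uint32_t last_seq;      /* last fence the hardware signaled */
};

/* amdgpu_fence_emit(): hand out the next sequence number. */
static uint32_t fence_emit(struct ring *ring)
{
        return atomic_fetch_add(&ring->sync_seq, 1) + 1;
}

/* amdgpu_fence_count_emitted(): emitted but not yet signaled; the
 * unsigned subtraction stays correct across 32-bit wraparound. */
static uint32_t fence_count_emitted(struct ring *ring)
{
        return atomic_load(&ring->sync_seq) - atomic_load(&ring->last_seq);
}

/* amdgpu_fence_process(), reduced: advance last_seq to what the
 * hardware reported (the real driver also signals each dma_fence
 * between the old and new values). */
static void fence_process(struct ring *ring, uint32_t hw_seq)
{
        atomic_store(&ring->last_seq, hw_seq);
}

int main(void)
{
        struct ring ring = { 0, 0 };

        fence_emit(&ring);
        uint32_t seq = fence_emit(&ring);

        printf("outstanding: %u\n", fence_count_emitted(&ring)); /* 2 */
        fence_process(&ring, seq);
        printf("outstanding: %u\n", fence_count_emitted(&ring)); /* 0 */
        return 0;
}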
|
amdgpu_ring_mux.c
  104  if (chunk->sync_seq > last_seq && chunk->sync_seq <= seq) {  in amdgpu_mux_resubmit_chunks()
  106  chunk->sync_seq,  in amdgpu_mux_resubmit_chunks()
  108  if (chunk->sync_seq ==  in amdgpu_mux_resubmit_chunks()
  476  if (chunk->sync_seq <= last_seq) {  in scan_and_remove_signaled_chunk()
  536  chunk->sync_seq = READ_ONCE(ring->fence_drv.sync_seq);  in amdgpu_ring_mux_end_ib()
  569  mux->seqno_to_resubmit = ring->fence_drv.sync_seq;  in amdgpu_mcbp_handle_trailing_fence_irq()
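A hedged sketch of the mux bookkeeping these hits outline: amdgpu_ring_mux_end_ib() stamps each software-ring chunk with the fence_drv.sync_seq current at end-of-IB; after a preemption, chunks with sync_seq in (last_seq, seq] are resubmitted (line 104) and chunks at or below the signaled last_seq are retired (line 476). The array below stands in for the driver's chunk list.

#include <stdint.h>
#include <stdio.h>

struct mux_chunk {
        uint32_t sync_seq;      /* snapshot from amdgpu_ring_mux_end_ib() */
        int live;
};

/* scan_and_remove_signaled_chunk(), reduced to its core test. */
static void retire_signaled_chunks(struct mux_chunk *c, int n, uint32_t last_seq)
{
        for (int i = 0; i < n; ++i)
                if (c[i].live && c[i].sync_seq <= last_seq)
                        c[i].live = 0;
}

/* amdgpu_mux_resubmit_chunks(): replay what was emitted but never signaled. */
static void resubmit_chunks(struct mux_chunk *c, int n,
                            uint32_t last_seq, uint32_t seq)
{
        for (int i = 0; i < n; ++i)
                if (c[i].live && c[i].sync_seq > last_seq && c[i].sync_seq <= seq)
                        printf("resubmitting chunk with seq %u\n", c[i].sync_seq);
}

int main(void)
{
        struct mux_chunk chunks[] = { {5, 1}, {7, 1}, {9, 1} };

        retire_signaled_chunks(chunks, 3, 6);   /* seq 5 is done */
        resubmit_chunks(chunks, 3, 6, 9);       /* seqs 7 and 9 replay */
        return 0;
}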
|
umsch_mm_v4_0.c
  311  set_hw_resources.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq;  in umsch_mm_v4_0_set_hw_resources()
  361  add_queue.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq;  in umsch_mm_v4_0_add_queue()
  393  remove_queue.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq;  in umsch_mm_v4_0_remove_queue()
|
amdgpu_ring_mux.h
  96  uint32_t sync_seq;  member
|
amdgpu_debugfs.c
  1818  uint32_t sync_seq, last_seq;  in amdgpu_ib_preempt_fences_swap() local
  1821  sync_seq = ring->fence_drv.sync_seq;  in amdgpu_ib_preempt_fences_swap()
  1824  sync_seq &= drv->num_fences_mask;  in amdgpu_ib_preempt_fences_swap()
  1841  } while (last_seq != sync_seq);  in amdgpu_ib_preempt_fences_swap()
  1955  ring->fence_drv.sync_seq) {  in amdgpu_debugfs_ib_preempt()
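The masking at line 1824 works because the per-ring fence slot array has a power-of-two size, so a 32-bit sequence number maps to its slot with seq & num_fences_mask; the do/while at line 1841 then visits every in-flight slot between the signaled and emitted positions. A small illustrative walk (num_fences and the sequence values here are made up):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
        const uint32_t num_fences = 64;          /* must be a power of two */
        const uint32_t num_fences_mask = num_fences - 1;
        uint32_t last_seq = 100, sync_seq = 103; /* pending: seqs 101..103 */

        last_seq &= num_fences_mask;
        sync_seq &= num_fences_mask;

        /* Visit each in-flight slot, as the preempt swap loop does. */
        do {
                last_seq = (last_seq + 1) & num_fences_mask;
                printf("visiting slot %u\n", last_seq);
        } while (last_seq != sync_seq);

        return 0;
}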
|
amdgpu_umsch_mm.c
  511  r = amdgpu_fence_wait_polling(ring, ring->fence_drv.sync_seq, adev->usec_timeout);  in amdgpu_umsch_mm_query_fence()
  514  ring->fence_drv.sync_seq);  in amdgpu_umsch_mm_query_fence()
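amdgpu_umsch_mm_query_fence() busy-waits until the ring's fence memory reaches the current sync_seq; the umsch_mm_v4_0.c hits above show where that sync_seq was pre-incremented as each command's api_completion_fence_value. A paraphrase of such a polling wait, with read_fence() standing in for amdgpu_fence_read() and a microsecond budget for adev->usec_timeout:

#include <stdint.h>
#include <stdio.h>

static volatile uint32_t fence_mem;     /* the value the "hardware" writes */

static uint32_t read_fence(void) { return fence_mem; }

/* Spin until the fence memory catches the target, wrap-safe via the
 * signed difference, returning 0 on timeout like the driver helper. */
static long fence_wait_polling(uint32_t wait_seq, long timeout_us)
{
        while ((int32_t)(wait_seq - read_fence()) > 0 && timeout_us > 0)
                timeout_us -= 2;        /* real code sleeps udelay(2) here */

        return timeout_us > 0 ? timeout_us : 0;
}

int main(void)
{
        fence_mem = 42;
        printf("already signaled: %ld\n", fence_wait_polling(40, 1000000));
        printf("times out:        %ld\n", fence_wait_polling(43, 1000));
        return 0;
}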
|
amdgpu_job.c
  125  ring->fence_drv.sync_seq);  in amdgpu_job_timedout()
|
amdgpu_ring.h
  118  uint32_t sync_seq;  member
|
vce_v3_0.c
  887  uint32_t seq = ring->fence_drv.sync_seq;  in vce_v3_0_emit_pipeline_sync()
|
sdma_v3_0.c
  1031  uint32_t seq = ring->fence_drv.sync_seq;  in sdma_v3_0_ring_emit_pipeline_sync()
|
mes_v12_0.c
  186  seq = ++ring->fence_drv.sync_seq;  in mes_v12_0_submit_pkt_and_poll_completion()
|
sdma_v7_0.c
  1195  uint32_t seq = ring->fence_drv.sync_seq;  in sdma_v7_0_ring_emit_pipeline_sync()
|
mes_v11_0.c
  200  seq = ++ring->fence_drv.sync_seq;  in mes_v11_0_submit_pkt_and_poll_completion()
|
sdma_v6_0.c
  1194  uint32_t seq = ring->fence_drv.sync_seq;  in sdma_v6_0_ring_emit_pipeline_sync()
|
sdma_v5_2.c
  1183  uint32_t seq = ring->fence_drv.sync_seq;  in sdma_v5_2_ring_emit_pipeline_sync()
|
sdma_v5_0.c
  1326  uint32_t seq = ring->fence_drv.sync_seq;  in sdma_v5_0_ring_emit_pipeline_sync()
|
sdma_v4_4_2.c
  1240  uint32_t seq = ring->fence_drv.sync_seq;  in sdma_v4_4_2_ring_emit_pipeline_sync()
|
gfx_v7_0.c
  3100  uint32_t seq = ring->fence_drv.sync_seq;  in gfx_v7_0_ring_emit_pipeline_sync()
  4986  ring->fence_drv.sync_seq, AMDGPU_FENCE_FLAG_EXEC);  in gfx_v7_0_reset_kgq()
|
sdma_v4_0.c
  1695  uint32_t seq = ring->fence_drv.sync_seq;  in sdma_v4_0_ring_emit_pipeline_sync()
|
gfx_v8_0.c
  6197  uint32_t seq = ring->fence_drv.sync_seq;  in gfx_v8_0_ring_emit_pipeline_sync()
  6932  ring->fence_drv.sync_seq, AMDGPU_FENCE_FLAG_EXEC);  in gfx_v8_0_reset_kgq()
|
gfx_v9_0.c
  5610  uint32_t seq = ring->fence_drv.sync_seq;  in gfx_v9_0_ring_emit_pipeline_sync()
  7260  ring->fence_drv.sync_seq, AMDGPU_FENCE_FLAG_EXEC);  in gfx_v9_0_reset_kgq()
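Every *_ring_emit_pipeline_sync() hit above opens the same way: snapshot the last emitted sync_seq, then emit a packet that stalls the engine until the fence memory reaches that value (a WAIT_REG_MEM-style wait on the hardware side). The CPU-side model below is only a sketch of that semantic, not the packet encoding:

#include <stdint.h>
#include <stdio.h>

struct ring {
        uint32_t sync_seq;                 /* last fence emitted on this ring */
        volatile uint32_t *fence_cpu_addr; /* memory the engine fences to */
};

/* What the emitted packet asks the engine to do: wait for >= seq. */
static void emit_pipeline_sync(struct ring *ring)
{
        uint32_t seq = ring->sync_seq;  /* the snapshot every IP version takes */

        while ((int32_t)(seq - *ring->fence_cpu_addr) > 0)
                ;       /* engine stalls until prior work has fenced */
}

int main(void)
{
        uint32_t fence_mem = 7;
        struct ring ring = { .sync_seq = 7, .fence_cpu_addr = &fence_mem };

        emit_pipeline_sync(&ring);      /* returns immediately: 7 >= 7 */
        printf("pipeline drained through seq %u\n", ring.sync_seq);
        return 0;
}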
|
/linux/drivers/md/ |
md.h
  589  atomic_t sync_seq;  member
|
md.c
  679  atomic_set(&mddev->sync_seq, 0);  in mddev_init()
  4881  int sync_seq = atomic_read(&mddev->sync_seq);  in stop_sync_thread() local
  4903  sync_seq != atomic_read(&mddev->sync_seq)));  in stop_sync_thread()
  9727  atomic_inc(&mddev->sync_seq);  in md_reap_sync_thread()
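md uses sync_seq differently: it is a generation counter, not a fence value. stop_sync_thread() snapshots it (line 4881) and sleeps until the counter moves (line 4903), which md_reap_sync_thread() guarantees by incrementing it (line 9727), so a waiter cannot miss a sync thread that finishes while it is going to sleep. A pthread paraphrase of that handshake, with the condition variable standing in for the kernel's wait_event/wake_up pair:

#include <pthread.h>
#include <stdatomic.h>
#include <stdio.h>

static atomic_int sync_seq;             /* mddev->sync_seq */
static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t cond = PTHREAD_COND_INITIALIZER;

/* md_reap_sync_thread(): bump the generation and wake waiters. */
static void reap_sync_thread(void)
{
        pthread_mutex_lock(&lock);
        atomic_fetch_add(&sync_seq, 1);
        pthread_cond_broadcast(&cond);
        pthread_mutex_unlock(&lock);
}

/* stop_sync_thread(): wait until at least one reap since our snapshot. */
static void stop_sync_thread(void)
{
        int snapshot = atomic_load(&sync_seq);

        pthread_mutex_lock(&lock);
        while (snapshot == atomic_load(&sync_seq))
                pthread_cond_wait(&cond, &lock);
        pthread_mutex_unlock(&lock);
}

static void *sync_thread(void *arg)
{
        (void)arg;
        reap_sync_thread();     /* pretend the resync just finished */
        return NULL;
}

int main(void)
{
        pthread_t t;

        pthread_create(&t, NULL, sync_thread, NULL);
        stop_sync_thread();
        pthread_join(t, NULL);
        printf("sync generation now %d\n", atomic_load(&sync_seq));
        return 0;
}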
|