/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_fence.c
      65  struct amdgpu_fence_driver *drv = &ring->fence_drv;  in amdgpu_fence_write()
      81  struct amdgpu_fence_driver *drv = &ring->fence_drv;  in amdgpu_fence_read()
     114  seq = ++ring->fence_drv.sync_seq;  in amdgpu_fence_emit()
     116  &ring->fence_drv.lock,  in amdgpu_fence_emit()
     119  amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr,  in amdgpu_fence_emit()
     123  ptr = &ring->fence_drv.fences[seq & ring->fence_drv.num_fences_mask];  in amdgpu_fence_emit()
     169  seq = ++ring->fence_drv.sync_seq;  in amdgpu_fence_emit_polling()
     171  seq - ring->fence_drv.num_fences_mask,  in amdgpu_fence_emit_polling()
     176  amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr,  in amdgpu_fence_emit_polling()
     193  mod_timer(&ring->fence_drv.fallback_timer,  in amdgpu_fence_schedule_fallback()
     [all …]
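
Taken together, the amdgpu_fence_emit() hits sketch the emission path: bump the per-ring sync_seq, have the ring write it to fence_drv.gpu_addr, and park the fence in a power-of-two table slot chosen by seq & num_fences_mask. Below is a minimal userspace sketch of that slotting only, with invented toy_* names standing in for the kernel structures; the real driver stores dma_fence pointers and waits out an unsignaled slot occupant instead of merely reporting it.

#include <stdint.h>
#include <stdio.h>

#define NUM_FENCES 8u                     /* must be a power of two */

struct toy_fence {
        uint32_t seq;
        int signaled;
};

struct toy_fence_driver {
        uint32_t sync_seq;                /* last emitted sequence number */
        uint32_t num_fences_mask;         /* NUM_FENCES - 1 */
        struct toy_fence fences[NUM_FENCES];
};

/* Emit a fence: bump the sequence number and claim the slot that the
 * low bits select; the newest NUM_FENCES seqnos map to distinct slots. */
static struct toy_fence *toy_fence_emit(struct toy_fence_driver *drv)
{
        uint32_t seq = ++drv->sync_seq;
        struct toy_fence *ptr = &drv->fences[seq & drv->num_fences_mask];

        if (ptr->seq && !ptr->signaled)
                printf("slot %u: still owned by unsignaled seq %u\n",
                       seq & drv->num_fences_mask, ptr->seq);
        ptr->seq = seq;
        ptr->signaled = 0;
        return ptr;
}

int main(void)
{
        struct toy_fence_driver drv = { .num_fences_mask = NUM_FENCES - 1 };

        for (int i = 0; i < 10; i++) {
                struct toy_fence *f = toy_fence_emit(&drv);
                f->signaled = 1;          /* pretend the GPU signaled it */
        }
        printf("emitted through seq %u\n", drv.sync_seq);
        return 0;
}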

umsch_mm_v4_0.c
     310  set_hw_resources.api_status.api_completion_fence_addr = umsch->ring.fence_drv.gpu_addr;  in umsch_mm_v4_0_set_hw_resources()
     311  set_hw_resources.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq;  in umsch_mm_v4_0_set_hw_resources()
     360  add_queue.api_status.api_completion_fence_addr = umsch->ring.fence_drv.gpu_addr;  in umsch_mm_v4_0_add_queue()
     361  add_queue.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq;  in umsch_mm_v4_0_add_queue()
     392  remove_queue.api_status.api_completion_fence_addr = umsch->ring.fence_drv.gpu_addr;  in umsch_mm_v4_0_remove_queue()
     393  remove_queue.api_status.api_completion_fence_value = ++umsch->ring.fence_drv.sync_seq;  in umsch_mm_v4_0_remove_queue()

amdgpu_ring_mux.c
      99  last_seq = atomic_read(&e->ring->fence_drv.last_seq);  in amdgpu_mux_resubmit_chunks()
     109  le32_to_cpu(*(e->ring->fence_drv.cpu_addr + 2))) {  in amdgpu_mux_resubmit_chunks()
     474  last_seq = atomic_read(&ring->fence_drv.last_seq);  in scan_and_remove_signaled_chunk()
     537  chunk->sync_seq = READ_ONCE(ring->fence_drv.sync_seq);  in amdgpu_ring_mux_end_ib()
     570  mux->seqno_to_resubmit = ring->fence_drv.sync_seq;  in amdgpu_mcbp_handle_trailing_fence_irq()
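
Two readback styles show up in the mux: the cached atomic last_seq, and a raw le32_to_cpu() load of the value the GPU wrote through the CPU mapping (fence_drv.cpu_addr + 2). Here is a self-contained sketch of the raw load only, assembling the dword from explicit little-endian bytes so it is correct on any host; the page layout and seqno values are invented.

#include <stdint.h>
#include <stdio.h>

/* Userspace analogue of le32_to_cpu() on a value the GPU wrote to a
 * CPU-visible fence page. */
static uint32_t read_le32(const volatile uint8_t *p)
{
        return (uint32_t)p[0] | (uint32_t)p[1] << 8 |
               (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
}

int main(void)
{
        volatile uint8_t fence_page[16] = { 0 };
        uint32_t sync_seq = 43;           /* a pending chunk's fence seqno */

        /* pretend the GPU stored seq 42 at dword offset 2 */
        fence_page[8] = 42;

        uint32_t done = read_le32(&fence_page[8]);
        printf("hw completed %u, chunk %u must be %s\n", done, sync_seq,
               sync_seq <= done ? "dropped" : "resubmitted");
        return 0;
}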

amdgpu_job.c
     123  job->base.sched->name, atomic_read(&ring->fence_drv.last_seq),  in amdgpu_job_timedout()
     124  ring->fence_drv.sync_seq);  in amdgpu_job_timedout()
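
The timeout handler prints exactly two counters, and their difference is the whole diagnosis: last_seq is what the GPU finished, sync_seq is what the driver emitted. A toy restatement of that check; the function name and values are illustrative only.

#include <stdint.h>
#include <stdio.h>

static const char *ring_state(uint32_t last_seq, uint32_t sync_seq)
{
        /* Equal counters mean the ring has drained; a timeout that
         * fires while they differ points at stuck work. */
        return last_seq == sync_seq ? "idle" : "work outstanding";
}

int main(void)
{
        printf("completed=24 emitted=24 -> %s\n", ring_state(24, 24));
        printf("completed=21 emitted=24 -> %s\n", ring_state(21, 24));
        return 0;
}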

sdma_v6_0.c
    1168  uint32_t seq = ring->fence_drv.sync_seq;  in sdma_v6_0_ring_emit_pipeline_sync()
    1169  uint64_t addr = ring->fence_drv.gpu_addr;  in sdma_v6_0_ring_emit_pipeline_sync()
    1655  struct amdgpu_userq_fence_driver *fence_drv = NULL;  in sdma_v6_0_process_fence_irq() (local)
    1662  fence_drv = xa_load(xa, doorbell_offset);  in sdma_v6_0_process_fence_irq()
    1663  if (fence_drv)  in sdma_v6_0_process_fence_irq()
    1664  amdgpu_userq_fence_driver_process(fence_drv);  in sdma_v6_0_process_fence_irq()
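
The IRQ-side hits here (and repeated in gfx_v11_0.c and gfx_v12_0.c below) follow one pattern: use the doorbell offset carried by the interrupt as a key, look up a registered user-queue fence driver, and process it only if one exists. A userspace model of that dispatch, with a plain lookup table standing in for the kernel xarray and every name invented.

#include <stdint.h>
#include <stdio.h>

#define SLOTS 64u

struct userq_fence_driver {
        uint32_t doorbell;                /* key it was registered under */
        const char *owner;
};

/* Stand-in for the xarray keyed by doorbell offset. */
static struct userq_fence_driver *table[SLOTS];

static struct userq_fence_driver *userq_lookup(uint32_t doorbell_offset)
{
        struct userq_fence_driver *d = table[doorbell_offset % SLOTS];

        return (d && d->doorbell == doorbell_offset) ? d : NULL;
}

static void process_fence_irq(uint32_t doorbell_offset)
{
        struct userq_fence_driver *fence_drv = userq_lookup(doorbell_offset);

        if (fence_drv)                    /* only user queues are registered */
                printf("doorbell %u: process fences for %s\n",
                       doorbell_offset, fence_drv->owner);
        else
                printf("doorbell %u: not a user queue\n", doorbell_offset);
}

int main(void)
{
        struct userq_fence_driver uq = { .doorbell = 17, .owner = "userq0" };

        table[uq.doorbell % SLOTS] = &uq;
        process_fence_irq(17);
        process_fence_irq(3);
        return 0;
}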

amdgpu_vpe.c
     569  uint32_t seq = ring->fence_drv.sync_seq;  in vpe_ring_emit_pipeline_sync()
     570  uint64_t addr = ring->fence_drv.gpu_addr;  in vpe_ring_emit_pipeline_sync()

mes_userqueue.c
     285  userq_props->fence_address = queue->fence_drv->gpu_addr;  in mes_userq_mqd_create()

mes_v11_0.c
     206  seq = ++ring->fence_drv.sync_seq;  in mes_v11_0_submit_pkt_and_poll_completion()
     208  seq - ring->fence_drv.num_fences_mask,  in mes_v11_0_submit_pkt_and_poll_completion()
     224  ring->fence_drv.gpu_addr;  in mes_v11_0_submit_pkt_and_poll_completion()

mes_v12_0.c
     191  seq = ++ring->fence_drv.sync_seq;  in mes_v12_0_submit_pkt_and_poll_completion()
     193  seq - ring->fence_drv.num_fences_mask,  in mes_v12_0_submit_pkt_and_poll_completion()
     209  ring->fence_drv.gpu_addr;  in mes_v12_0_submit_pkt_and_poll_completion()
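
mes_v11_0 and mes_v12_0 share the submit-and-poll shape that the umsch hits above also use: stamp the packet with fence_drv.gpu_addr and the incremented sync_seq, then spin on the fence location until the firmware writes that value back. A sketch under the simplifying assumption that "firmware" is a synchronous function call; in the real drivers the write happens asynchronously and the poll has a proper timeout.

#include <stdint.h>
#include <stdio.h>

static volatile uint32_t fence_mem;       /* stands in for *fence_drv.gpu_addr */
static uint32_t sync_seq;                 /* stands in for fence_drv.sync_seq */

/* Collapsing the firmware to a function call keeps the sketch
 * single-threaded. */
static void fake_firmware_complete(uint32_t seq)
{
        fence_mem = seq;
}

static int submit_pkt_and_poll(void)
{
        uint32_t seq = ++sync_seq;        /* stamp the packet */

        fake_firmware_complete(seq);
        for (int tries = 0; tries < 1000; tries++) {
                if (fence_mem >= seq)
                        return 0;         /* firmware wrote the value back */
        }
        return -1;                        /* timeout */
}

int main(void)
{
        printf("submit: %s\n", submit_pkt_and_poll() ? "timed out" : "completed");
        return 0;
}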

amdgpu_ring.h
     301  struct amdgpu_fence_driver fence_drv;  (member)
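
This is the declaration every amdgpu hit above resolves to, and it marks the structural difference from the radeon section below: amdgpu embeds one fence driver per ring, while radeon keeps a per-device array indexed by ring id (rdev->fence_drv[ring]). A side-by-side toy layout, trimmed to the two fields these search hits actually touch; the toy_* types are invented.

#include <stdint.h>
#include <stdio.h>

struct toy_fence_driver {
        uint64_t gpu_addr;                /* where the GPU writes the seqno */
        uint32_t sync_seq;                /* last emitted seqno */
};

/* amdgpu style: one driver embedded in each ring (amdgpu_ring.h line 301) */
struct toy_amdgpu_ring {
        struct toy_fence_driver fence_drv;
};

/* radeon style: a per-device array indexed by ring id */
#define TOY_NUM_RINGS 8
struct toy_radeon_device {
        struct toy_fence_driver fence_drv[TOY_NUM_RINGS];
};

int main(void)
{
        struct toy_amdgpu_ring ring = { { 0x1000, 0 } };
        struct toy_radeon_device rdev = { { { 0x2000, 0 } } };

        printf("amdgpu: ring.fence_drv.gpu_addr    = 0x%llx\n",
               (unsigned long long)ring.fence_drv.gpu_addr);
        printf("radeon: rdev.fence_drv[0].gpu_addr = 0x%llx\n",
               (unsigned long long)rdev.fence_drv[0].gpu_addr);
        return 0;
}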

gfx_v12_0.c
    4491  uint32_t seq = ring->fence_drv.sync_seq;  in gfx_v12_0_ring_emit_pipeline_sync()
    4492  uint64_t addr = ring->fence_drv.gpu_addr;  in gfx_v12_0_ring_emit_pipeline_sync()
    4840  struct amdgpu_userq_fence_driver *fence_drv = NULL;  in gfx_v12_0_eop_irq() (local)
    4845  fence_drv = xa_load(xa, doorbell_offset);  in gfx_v12_0_eop_irq()
    4846  if (fence_drv)  in gfx_v12_0_eop_irq()
    4847  amdgpu_userq_fence_driver_process(fence_drv);  in gfx_v12_0_eop_irq()

gfx_v11_0.c
    5964  uint32_t seq = ring->fence_drv.sync_seq;  in gfx_v11_0_ring_emit_pipeline_sync()
    5965  uint64_t addr = ring->fence_drv.gpu_addr;  in gfx_v11_0_ring_emit_pipeline_sync()
    6453  struct amdgpu_userq_fence_driver *fence_drv = NULL;  in gfx_v11_0_eop_irq() (local)
    6458  fence_drv = xa_load(xa, doorbell_offset);  in gfx_v11_0_eop_irq()
    6459  if (fence_drv)  in gfx_v11_0_eop_irq()
    6460  amdgpu_userq_fence_driver_process(fence_drv);  in gfx_v11_0_eop_irq()

gfx_v6_0.c
    2289  uint32_t seq = ring->fence_drv.sync_seq;  in gfx_v6_0_ring_emit_pipeline_sync()
    2290  uint64_t addr = ring->fence_drv.gpu_addr;  in gfx_v6_0_ring_emit_pipeline_sync()

gfx_v7_0.c
    3109  uint32_t seq = ring->fence_drv.sync_seq;  in gfx_v7_0_ring_emit_pipeline_sync()
    3110  uint64_t addr = ring->fence_drv.gpu_addr;  in gfx_v7_0_ring_emit_pipeline_sync()
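
Every *_ring_emit_pipeline_sync() hit in this listing reduces to the same two loads: the newest emitted seqno and the fence address, used to emit a packet that stalls the GPU front end until mem[addr] >= seq. The sketch below performs that wait on the CPU instead of encoding a WAIT_REG_MEM packet; the function name, mask, and timeout are invented for illustration.

#include <stdint.h>
#include <stdio.h>

/* Poll until the masked value at addr reaches seq, or give up. */
static int wait_mem_gte(const volatile uint32_t *addr, uint32_t seq,
                        uint32_t mask, long timeout)
{
        while (timeout--) {
                if ((*addr & mask) >= seq)
                        return 0;
        }
        return -1;
}

int main(void)
{
        volatile uint32_t fence_mem = 0;
        uint32_t sync_seq = 7;            /* ring->fence_drv.sync_seq */

        fence_mem = 7;                    /* pretend prior work signaled */
        printf("pipeline sync: %s\n",
               wait_mem_gte(&fence_mem, sync_seq, 0xffffffffu, 1000)
               ? "timed out" : "drained");
        return 0;
}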

/linux/drivers/gpu/drm/radeon/

radeon_fence.c
      69  struct radeon_fence_driver *drv = &rdev->fence_drv[ring];  in radeon_fence_write()
      90  struct radeon_fence_driver *drv = &rdev->fence_drv[ring];  in radeon_fence_read()
     119  &rdev->fence_drv[ring].lockup_work,  in radeon_fence_schedule_check()
     145  (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring];  in radeon_fence_emit()
     177  seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq);  in radeon_fence_check_signaled()
     224  last_seq = atomic64_read(&rdev->fence_drv[ring].last_seq);  in radeon_fence_activity()
     226  last_emitted = rdev->fence_drv[ring].sync_seq[ring];  in radeon_fence_activity()
     251  } while (atomic64_xchg(&rdev->fence_drv[ring].last_seq, seq) > seq);  in radeon_fence_activity()
     269  struct radeon_fence_driver *fence_drv;  in radeon_fence_check_lockup() (local)
     273  fence_drv = container_of(work, struct radeon_fence_driver,  in radeon_fence_check_lockup()
     [all …]
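
The loop at line 251 is worth pausing on: it keeps last_seq monotonic without a lock by swapping in the seqno it computed and retrying whenever the displaced value was newer. A C11 re-creation of that idiom under the same 64-bit seqno model; the real loop body also rescans ring activity on each pass, which this sketch omits.

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

static _Atomic uint64_t last_seq;

/* Record a newly observed seqno, never letting last_seq move backwards
 * even when several CPUs race: if the exchange displaced a newer value,
 * put it back and try again. */
static void update_last_seq(uint64_t seq)
{
        for (;;) {
                uint64_t old = atomic_exchange(&last_seq, seq);

                if (old <= seq)
                        break;            /* we stored the maximum */
                seq = old;                /* displaced a newer value */
        }
}

int main(void)
{
        update_last_seq(5);
        update_last_seq(12);
        update_last_seq(9);               /* a stale observer loses */
        printf("last_seq = %llu\n",
               (unsigned long long)atomic_load(&last_seq));
        return 0;
}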

uvd_v2_2.c
      43  uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;  in uvd_v2_2_fence_emit()

evergreen_dma.c
      44  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in evergreen_dma_fence_ring_emit()

uvd_v1_0.c
      85  uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;  in uvd_v1_0_fence_emit()

r600_dma.c
     290  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in r600_dma_fence_ring_emit()

cik_sdma.c
     203  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in cik_sdma_fence_ring_emit()

ni.c
    1380  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in cayman_fence_ring_emit()

r600.c
    2878  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in r600_fence_ring_emit()
    2907  radeon_ring_write(ring, ((rdev->fence_drv[fence->ring].scratch_reg - PACKET3_SET_CONFIG_REG_OFFSET…  in r600_fence_ring_emit()

/linux/drivers/gpu/drm/virtio/

virtgpu_ioctl.c
     172  fence = virtio_gpu_fence_alloc(vgdev, vgdev->fence_drv.context, 0);  in virtio_gpu_resource_create_ioctl()
     259  fence = virtio_gpu_fence_alloc(vgdev, vgdev->fence_drv.context, 0);  in virtio_gpu_transfer_from_host_ioctl()
     319  fence = virtio_gpu_fence_alloc(vgdev, vgdev->fence_drv.context,  in virtio_gpu_transfer_to_host_ioctl()

virtgpu_submit.c
     478  u64 fence_ctx = vgdev->fence_drv.context;  in virtio_gpu_execbuffer_ioctl()
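
Both virtio files draw fences from the single per-device vgdev->fence_drv.context, which is what makes their seqnos comparable: dma-fence ordering is only defined within one context. A toy allocator under that assumption, with invented toy_* names; the real virtio_gpu_fence_alloc() builds a proper dma_fence.

#include <stdint.h>
#include <stdio.h>

struct toy_fence {
        uint64_t context;
        uint64_t seqno;
};

struct toy_fence_drv {
        uint64_t context;                 /* one context per device */
        uint64_t next_seqno;
};

static struct toy_fence toy_fence_alloc(struct toy_fence_drv *drv)
{
        return (struct toy_fence){ drv->context, ++drv->next_seqno };
}

/* Ordering is only meaningful inside one context. */
static int toy_fence_later(struct toy_fence a, struct toy_fence b)
{
        return a.context == b.context && a.seqno > b.seqno;
}

int main(void)
{
        struct toy_fence_drv drv = { .context = 1 };
        struct toy_fence f1 = toy_fence_alloc(&drv);
        struct toy_fence f2 = toy_fence_alloc(&drv);

        printf("f2 later than f1: %d\n", toy_fence_later(f2, f1));
        return 0;
}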