Lines matching refs: kiq (all hits below fall in gfx_v10_0.c, the GFX v10 code of the amdgpu driver)

3808 adev->gfx.kiq[0].pmf = &gfx_v10_0_kiq_pm4_funcs; in gfx_v10_0_set_kiq_pm4_funcs()
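
The pmf hook installed above is the table of PM4 packet builders that every later kiq hit in this listing goes through. A minimal sketch of its shape, assuming the upstream name struct kiq_pm4_funcs from amdgpu_gfx.h and showing only the members the matches below actually exercise (the real table carries more entries):

    /* Sketch: only the members used by the hits below are shown.
     * The *_size fields are the dword budgets reserved via
     * amdgpu_ring_alloc() before the matching packet is emitted. */
    struct kiq_pm4_funcs {
            void (*kiq_map_queues)(struct amdgpu_ring *kiq_ring,
                                   struct amdgpu_ring *ring);
            void (*kiq_unmap_queues)(struct amdgpu_ring *kiq_ring,
                                     struct amdgpu_ring *ring,
                                     enum amdgpu_unmap_queues_action action,
                                     u64 gpu_addr, u64 seq);
            int map_queues_size;
            int unmap_queues_size;
    };
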
4767 &adev->gfx.kiq[0].irq); in gfx_v10_0_sw_init()
4924 amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring); in gfx_v10_0_sw_fini()
6532 adev->gfx.kiq[0].ring.sched.ready = false; in gfx_v10_0_cp_compute_enable()
7006 (adev->doorbell_index.kiq * 2) << 2); in gfx_v10_0_kiq_init_register()
7045 if (adev->gfx.kiq[0].mqd_backup) in gfx_v10_0_kiq_init_queue()
7046 memcpy_toio(mqd, adev->gfx.kiq[0].mqd_backup, sizeof(*mqd)); in gfx_v10_0_kiq_init_queue()
7068 if (adev->gfx.kiq[0].mqd_backup) in gfx_v10_0_kiq_init_queue()
7069 memcpy_fromio(adev->gfx.kiq[0].mqd_backup, mqd, sizeof(*mqd)); in gfx_v10_0_kiq_init_queue()
7109 ring = &adev->gfx.kiq[0].ring; in gfx_v10_0_kiq_resume()
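
The two memcpy hits in gfx_v10_0_kiq_init_queue() are the save and restore halves of the KIQ's MQD (memory queue descriptor) shadow; the descriptor itself lives in VRAM, hence the _toio/_fromio variants. A condensed sketch of the branch they sit in, assuming the amdgpu_in_reset() test amdgpu uses for this pattern, with the MQD programming step reduced to a hypothetical helper:

    struct v10_compute_mqd *mqd = ring->mqd_ptr;

    if (amdgpu_in_reset(adev)) {
            /* coming back from GPU reset: reload the descriptor
             * from the system-memory shadow */
            if (adev->gfx.kiq[0].mqd_backup)
                    memcpy_toio(mqd, adev->gfx.kiq[0].mqd_backup,
                                sizeof(*mqd));
    } else {
            init_kiq_mqd(ring);     /* hypothetical: program the MQD */
            /* first init: shadow the freshly programmed descriptor
             * so later resets can restore it */
            if (adev->gfx.kiq[0].mqd_backup)
                    memcpy_fromio(adev->gfx.kiq[0].mqd_backup, mqd,
                                  sizeof(*mqd));
    }
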
8805 struct amdgpu_kiq *kiq = &adev->gfx.kiq[0]; in gfx_v10_0_ring_preempt_ib() local
8806 struct amdgpu_ring *kiq_ring = &kiq->ring; in gfx_v10_0_ring_preempt_ib()
8809 if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues) in gfx_v10_0_ring_preempt_ib()
8812 spin_lock_irqsave(&kiq->ring_lock, flags); in gfx_v10_0_ring_preempt_ib()
8814 if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) { in gfx_v10_0_ring_preempt_ib()
8815 spin_unlock_irqrestore(&kiq->ring_lock, flags); in gfx_v10_0_ring_preempt_ib()
8823 kiq->pmf->kiq_unmap_queues(kiq_ring, ring, PREEMPT_QUEUES_NO_UNMAP, in gfx_v10_0_ring_preempt_ib()
8828 spin_unlock_irqrestore(&kiq->ring_lock, flags); in gfx_v10_0_ring_preempt_ib()
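
Every KIQ submission in this listing, the preemption here and the queue resets below, follows the same discipline these hits trace: take kiq->ring_lock with interrupts off, reserve the packet's dword budget on the shared KIQ ring, emit through the pmf table, then commit and unlock. A minimal sketch of that pattern, assuming amdgpu_ring_commit() as the final kick (it sits between the emit and unlock hits but is not itself a kiq match) and placeholder zeros for the fence address and sequence number the listing truncates:

    unsigned long flags;

    spin_lock_irqsave(&kiq->ring_lock, flags);
    if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {
            spin_unlock_irqrestore(&kiq->ring_lock, flags);
            return -ENOMEM;
    }
    /* ask the CP to preempt the queue without unmapping it; the
     * real call passes a trailing-fence address and sequence
     * number in place of the zeros */
    kiq->pmf->kiq_unmap_queues(kiq_ring, ring, PREEMPT_QUEUES_NO_UNMAP,
                               0, 0);
    amdgpu_ring_commit(kiq_ring);
    spin_unlock_irqrestore(&kiq->ring_lock, flags);
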
9369 struct amdgpu_ring *ring = &(adev->gfx.kiq[0].ring); in gfx_v10_0_kiq_set_interrupt_state()
9413 struct amdgpu_ring *ring = &(adev->gfx.kiq[0].ring); in gfx_v10_0_kiq_irq()
9466 struct amdgpu_kiq *kiq = &adev->gfx.kiq[0]; in gfx_v10_0_reset_kgq() local
9467 struct amdgpu_ring *kiq_ring = &kiq->ring; in gfx_v10_0_reset_kgq()
9476 if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues) in gfx_v10_0_reset_kgq()
9479 spin_lock_irqsave(&kiq->ring_lock, flags); in gfx_v10_0_reset_kgq()
9481 if (amdgpu_ring_alloc(kiq_ring, 5 + 7 + 7 + kiq->pmf->map_queues_size)) { in gfx_v10_0_reset_kgq()
9482 spin_unlock_irqrestore(&kiq->ring_lock, flags); in gfx_v10_0_reset_kgq()
9501 kiq->pmf->kiq_map_queues(kiq_ring, ring); in gfx_v10_0_reset_kgq()
9504 spin_unlock_irqrestore(&kiq->ring_lock, flags); in gfx_v10_0_reset_kgq()
9534 struct amdgpu_kiq *kiq = &adev->gfx.kiq[0]; in gfx_v10_0_reset_kcq() local
9535 struct amdgpu_ring *kiq_ring = &kiq->ring; in gfx_v10_0_reset_kcq()
9542 if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues) in gfx_v10_0_reset_kcq()
9545 spin_lock_irqsave(&kiq->ring_lock, flags); in gfx_v10_0_reset_kcq()
9547 if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) { in gfx_v10_0_reset_kcq()
9548 spin_unlock_irqrestore(&kiq->ring_lock, flags); in gfx_v10_0_reset_kcq()
9552 kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, in gfx_v10_0_reset_kcq()
9555 spin_unlock_irqrestore(&kiq->ring_lock, flags); in gfx_v10_0_reset_kcq()
9597 spin_lock_irqsave(&kiq->ring_lock, flags); in gfx_v10_0_reset_kcq()
9598 if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size)) { in gfx_v10_0_reset_kcq()
9599 spin_unlock_irqrestore(&kiq->ring_lock, flags); in gfx_v10_0_reset_kcq()
9602 kiq->pmf->kiq_map_queues(kiq_ring, ring); in gfx_v10_0_reset_kcq()
9604 spin_unlock_irqrestore(&kiq->ring_lock, flags); in gfx_v10_0_reset_kcq()
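
The two reset paths use that pattern differently. gfx_v10_0_reset_kgq() remaps the GFX queue in a single locked submission, which is why its alloc reserves 5 + 7 + 7 extra dwords ahead of map_queues_size, headroom for packets emitted before the map (the listing shows only the arithmetic, not those emits). gfx_v10_0_reset_kcq() instead makes two locked submissions: an unmap with RESET_QUEUES, then, after lines the listing skips because they carry no kiq reference, a second lock/alloc/map. A compressed sketch of that two-phase flow, error handling trimmed:

    /* phase 1: unmap (kill) the hung compute queue */
    spin_lock_irqsave(&kiq->ring_lock, flags);
    if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {
            spin_unlock_irqrestore(&kiq->ring_lock, flags);
            return -ENOMEM;
    }
    kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, 0, 0);
    amdgpu_ring_commit(kiq_ring);
    spin_unlock_irqrestore(&kiq->ring_lock, flags);

    /* wait for the unmap to land and rebuild the MQD (elided) */

    /* phase 2: map the rebuilt queue back onto the CP */
    spin_lock_irqsave(&kiq->ring_lock, flags);
    if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size)) {
            spin_unlock_irqrestore(&kiq->ring_lock, flags);
            return -ENOMEM;
    }
    kiq->pmf->kiq_map_queues(kiq_ring, ring);
    amdgpu_ring_commit(kiq_ring);
    spin_unlock_irqrestore(&kiq->ring_lock, flags);
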
9904 adev->gfx.kiq[0].ring.funcs = &gfx_v10_0_ring_funcs_kiq; in gfx_v10_0_set_ring_funcs()
9943 adev->gfx.kiq[0].irq.num_types = AMDGPU_CP_KIQ_IRQ_LAST; in gfx_v10_0_set_irq_funcs()
9944 adev->gfx.kiq[0].irq.funcs = &gfx_v10_0_kiq_irq_funcs; in gfx_v10_0_set_irq_funcs()