/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_gfx.c
  302  struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];  in amdgpu_gfx_kiq_init_ring() local
  303  struct amdgpu_irq_src *irq = &kiq->irq;  in amdgpu_gfx_kiq_init_ring()
  304  struct amdgpu_ring *ring = &kiq->ring;  in amdgpu_gfx_kiq_init_ring()
  307  spin_lock_init(&kiq->ring_lock);  in amdgpu_gfx_kiq_init_ring()
  315  (adev->doorbell_index.kiq +  in amdgpu_gfx_kiq_init_ring()
  323  ring->eop_gpu_addr = kiq->eop_gpu_addr;  in amdgpu_gfx_kiq_init_ring()
  343  struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];  in amdgpu_gfx_kiq_fini() local
  345  amdgpu_bo_free_kernel(&kiq->eop_obj, &kiq->eop_gpu_addr, NULL);  in amdgpu_gfx_kiq_fini()
  353  struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];  in amdgpu_gfx_kiq_init() local
  356  AMDGPU_GEM_DOMAIN_GTT, &kiq->eop_obj,  in amdgpu_gfx_kiq_init()
  [all …]
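
Read together, these matches trace the KIQ lifecycle: amdgpu_gfx_kiq_init() allocates the EOP buffer in GTT, amdgpu_gfx_kiq_init_ring() wires up the ring, IRQ source, doorbell index, and ring_lock, and amdgpu_gfx_kiq_fini() frees the EOP buffer. A condensed sketch, assuming the in-tree amdgpu headers; the doorbell arithmetic truncated at line 315 and all error handling are elided, and the function below is a walk-through, not a real in-tree helper.

    #include "amdgpu.h"

    /* Sketch only: condensed from the amdgpu_gfx.c matches above. */
    static void kiq_lifecycle_sketch(struct amdgpu_device *adev, int xcc_id)
    {
    	struct amdgpu_kiq *kiq = &adev->gfx.kiq[xcc_id];
    	struct amdgpu_ring *ring = &kiq->ring;

    	/* init_ring: one lock serializes every submission on this KIQ */
    	spin_lock_init(&kiq->ring_lock);
    	ring->eop_gpu_addr = kiq->eop_gpu_addr;	/* EOP buffer from kiq_init() */
    	/* the doorbell index comes from adev->doorbell_index.kiq plus a
    	 * per-instance offset that the match at line 315 truncates */

    	/* fini: release the EOP buffer allocated in amdgpu_gfx_kiq_init() */
    	amdgpu_bo_free_kernel(&kiq->eop_obj, &kiq->eop_gpu_addr, NULL);
    }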

amdgpu_gmc.c
  683  struct amdgpu_ring *ring = &adev->gfx.kiq[inst].ring;  in amdgpu_gmc_flush_gpu_tlb_pasid()
  684  struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];  in amdgpu_gmc_flush_gpu_tlb_pasid() local
  713  ndw = kiq->pmf->invalidate_tlbs_size + 8;  in amdgpu_gmc_flush_gpu_tlb_pasid()
  716  ndw += kiq->pmf->invalidate_tlbs_size;  in amdgpu_gmc_flush_gpu_tlb_pasid()
  719  ndw += kiq->pmf->invalidate_tlbs_size;  in amdgpu_gmc_flush_gpu_tlb_pasid()
  721  spin_lock(&adev->gfx.kiq[inst].ring_lock);  in amdgpu_gmc_flush_gpu_tlb_pasid()
  724  spin_unlock(&adev->gfx.kiq[inst].ring_lock);  in amdgpu_gmc_flush_gpu_tlb_pasid()
  728  kiq->pmf->kiq_invalidate_tlbs(ring, pasid, 2, all_hub);  in amdgpu_gmc_flush_gpu_tlb_pasid()
  731  kiq->pmf->kiq_invalidate_tlbs(ring, pasid, 0, all_hub);  in amdgpu_gmc_flush_gpu_tlb_pasid()
  733  kiq->pmf->kiq_invalidate_tlbs(ring, pasid, flush_type, all_hub);  in amdgpu_gmc_flush_gpu_tlb_pasid()
  [all …]
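
These matches outline the KIQ path for PASID TLB flushes: size the ring allocation from pmf->invalidate_tlbs_size, submit under ring_lock, then emit one of the flush_type variants (the literals 2 and 0 at lines 728/731 are ASIC-specific special cases). A sketch of the common branch, assuming the in-tree amdgpu context; amdgpu_ring_commit() is assumed from the usual ring API, and the fence wait hidden behind "[all …]" is elided.

    #include "amdgpu.h"

    /* Sketch of amdgpu_gmc_flush_gpu_tlb_pasid()'s common KIQ branch. */
    static int kiq_flush_pasid_sketch(struct amdgpu_device *adev, int inst,
    				  u16 pasid, u32 flush_type, bool all_hub)
    {
    	struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];
    	struct amdgpu_ring *ring = &kiq->ring;
    	/* worst-case dword budget for the packet about to be emitted */
    	unsigned int ndw = kiq->pmf->invalidate_tlbs_size + 8;
    	int r;

    	spin_lock(&kiq->ring_lock);
    	r = amdgpu_ring_alloc(ring, ndw);
    	if (r) {
    		spin_unlock(&kiq->ring_lock);
    		return r;
    	}
    	kiq->pmf->kiq_invalidate_tlbs(ring, pasid, flush_type, all_hub);
    	amdgpu_ring_commit(ring);	/* assumed; not visible in the match */
    	spin_unlock(&kiq->ring_lock);
    	return 0;
    }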

amdgpu_amdkfd.c
  831  struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];  in amdgpu_amdkfd_unmap_hiq() local
  832  struct amdgpu_ring *kiq_ring = &kiq->ring;  in amdgpu_amdkfd_unmap_hiq()
  837  if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)  in amdgpu_amdkfd_unmap_hiq()
  857  spin_lock(&kiq->ring_lock);  in amdgpu_amdkfd_unmap_hiq()
  859  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {  in amdgpu_amdkfd_unmap_hiq()
  860  spin_unlock(&kiq->ring_lock);  in amdgpu_amdkfd_unmap_hiq()
  865  kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, 0, 0);  in amdgpu_amdkfd_unmap_hiq()
  876  spin_unlock(&kiq->ring_lock);  in amdgpu_amdkfd_unmap_hiq()
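
amdgpu_amdkfd_unmap_hiq() shows the fully guarded form of a KIQ submission: verify the per-ASIC packet-manager hook exists before touching the ring, and drop the lock on the allocation-failure path. A sketch assuming the in-tree context; the error codes and the fence synchronization between lines 865 and 876 are assumptions, not visible in the matches.

    #include "amdgpu.h"

    /* Sketch of the guarded unmap submission in amdgpu_amdkfd_unmap_hiq(). */
    static int hiq_unmap_sketch(struct amdgpu_device *adev, u32 inst,
    			    struct amdgpu_ring *ring)
    {
    	struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];
    	struct amdgpu_ring *kiq_ring = &kiq->ring;

    	/* back off if this ASIC has no packet-manager unmap hook */
    	if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)
    		return -EINVAL;	/* assumed errno */

    	spin_lock(&kiq->ring_lock);
    	if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {
    		spin_unlock(&kiq->ring_lock);	/* early unlock (line 860) */
    		return -ENOMEM;	/* assumed errno */
    	}
    	kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, 0, 0);
    	amdgpu_ring_commit(kiq_ring);	/* assumed from the usual ring API */
    	spin_unlock(&kiq->ring_lock);
    	return 0;
    }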

mes_v11_0.c
  1249  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in mes_v11_0_kiq_enable_queue() local
  1250  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in mes_v11_0_kiq_enable_queue()
  1253  if (!kiq->pmf || !kiq->pmf->kiq_map_queues)  in mes_v11_0_kiq_enable_queue()
  1256  r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);  in mes_v11_0_kiq_enable_queue()
  1262  kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring[0]);  in mes_v11_0_kiq_enable_queue()
  1274  ring = &adev->gfx.kiq[0].ring;  in mes_v11_0_queue_init()
  1329  spin_lock_init(&adev->gfx.kiq[0].ring_lock);  in mes_v11_0_kiq_ring_init()
  1331  ring = &adev->gfx.kiq[0].ring;  in mes_v11_0_kiq_ring_init()
  1357  ring = &adev->gfx.kiq[0].ring;  in mes_v11_0_mqd_sw_init()
  1444  amdgpu_bo_free_kernel(&adev->gfx.kiq[0].ring.mqd_obj,  in mes_v11_0_sw_fini()
  [all …]
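
mes_v11_0_kiq_enable_queue() is the map-side counterpart of the unmap pattern above: the same hook check and ring allocation, then kiq_map_queues() hands the MES scheduler ring to the hardware. mes_v12_0.c below repeats it verbatim against the same kiq[0]. A sketch; amdgpu_ring_commit() is assumed, and no ring_lock appears in these matches since this runs from the init path.

    #include "amdgpu.h"

    /* Sketch of the MES ring map via KIQ in mes_v11_0_kiq_enable_queue(). */
    static int mes_kiq_enable_queue_sketch(struct amdgpu_device *adev)
    {
    	struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];
    	struct amdgpu_ring *kiq_ring = &kiq->ring;
    	int r;

    	if (!kiq->pmf || !kiq->pmf->kiq_map_queues)
    		return -EINVAL;	/* assumed errno */

    	r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);
    	if (r)
    		return r;

    	/* map the MES scheduler ring through the KIQ packet manager */
    	kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring[0]);
    	amdgpu_ring_commit(kiq_ring);	/* assumed; not visible above */
    	return 0;
    }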

mes_v12_0.c
  1337  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in mes_v12_0_kiq_enable_queue() local
  1338  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in mes_v12_0_kiq_enable_queue()
  1341  if (!kiq->pmf || !kiq->pmf->kiq_map_queues)  in mes_v12_0_kiq_enable_queue()
  1344  r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);  in mes_v12_0_kiq_enable_queue()
  1350  kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring[0]);  in mes_v12_0_kiq_enable_queue()
  1367  ring = &adev->gfx.kiq[0].ring;  in mes_v12_0_queue_init()
  1439  spin_lock_init(&adev->gfx.kiq[0].ring_lock);  in mes_v12_0_kiq_ring_init()
  1441  ring = &adev->gfx.kiq[0].ring;  in mes_v12_0_kiq_ring_init()
  1467  ring = &adev->gfx.kiq[0].ring;  in mes_v12_0_mqd_sw_init()
  1553  amdgpu_bo_free_kernel(&adev->gfx.kiq[0].ring.mqd_obj,  in mes_v12_0_sw_fini()
  [all …]

gfx_v9_0.c
  1066  adev->gfx.kiq[0].pmf = &gfx_v9_0_kiq_pm4_funcs;  in gfx_v9_0_set_kiq_pm4_funcs()
  2446  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);  in gfx_v9_0_sw_fini()
  3444  adev->gfx.kiq[0].ring.sched.ready = false;  in gfx_v9_0_cp_compute_enable()
  3724  (adev->doorbell_index.kiq * 2) << 2);  in gfx_v9_0_kiq_init_register()
  3815  tmp_mqd = (struct v9_mqd *)adev->gfx.kiq[0].mqd_backup;  in gfx_v9_0_kiq_init_queue()
  3818  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v9_0_kiq_init_queue()
  3819  memcpy(mqd, adev->gfx.kiq[0].mqd_backup, sizeof(struct v9_mqd_allocation));  in gfx_v9_0_kiq_init_queue()
  3843  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v9_0_kiq_init_queue()
  3844  memcpy(adev->gfx.kiq[0].mqd_backup, mqd, sizeof(struct v9_mqd_allocation));  in gfx_v9_0_kiq_init_queue()
  3893  ring = &adev->gfx.kiq[0].ring;  in gfx_v9_0_kiq_resume()
  [all …]
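
The matches at 3815-3844 show the MQD backup protocol: on a GPU reset the saved image is copied back into the live MQD, otherwise the freshly programmed MQD is captured into kiq[0].mqd_backup for later resets. gfx_v9_4_3.c below repeats this per XCC instance. A sketch; the in_reset flag is hypothetical shorthand for the reset check the real function performs.

    #include "amdgpu.h"

    /* Sketch of gfx_v9_0_kiq_init_queue()'s backup/restore branches. */
    static void kiq_mqd_backup_sketch(struct amdgpu_device *adev,
    				  struct v9_mqd_allocation *mqd, bool in_reset)
    {
    	void *backup = adev->gfx.kiq[0].mqd_backup;

    	if (!backup)
    		return;
    	if (in_reset)	/* hypothetical flag; see lead-in */
    		memcpy(mqd, backup, sizeof(*mqd));	/* restore saved MQD */
    	else
    		memcpy(backup, mqd, sizeof(*mqd));	/* capture fresh MQD */
    }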

gfx_v9_4_3.c
  339  adev->gfx.kiq[i].pmf = &gfx_v9_4_3_kiq_pm4_funcs;  in gfx_v9_4_3_set_kiq_pm4_funcs()
  1218  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[i].ring);  in gfx_v9_4_3_sw_fini()
  1736  adev->gfx.kiq[xcc_id].ring.sched.ready = false;  in gfx_v9_4_3_xcc_cp_compute_enable()
  2024  ((adev->doorbell_index.kiq +  in gfx_v9_4_3_xcc_kiq_init_register()
  2113  tmp_mqd = (struct v9_mqd *)adev->gfx.kiq[xcc_id].mqd_backup;  in gfx_v9_4_3_xcc_kiq_init_queue()
  2116  if (adev->gfx.kiq[xcc_id].mqd_backup)  in gfx_v9_4_3_xcc_kiq_init_queue()
  2117  memcpy(mqd, adev->gfx.kiq[xcc_id].mqd_backup, sizeof(struct v9_mqd_allocation));  in gfx_v9_4_3_xcc_kiq_init_queue()
  2140  if (adev->gfx.kiq[xcc_id].mqd_backup)  in gfx_v9_4_3_xcc_kiq_init_queue()
  2141  memcpy(adev->gfx.kiq[xcc_id].mqd_backup, mqd, sizeof(struct v9_mqd_allocation));  in gfx_v9_4_3_xcc_kiq_init_queue()
  2211  ring = &adev->gfx.kiq[xcc_id].ring;  in gfx_v9_4_3_xcc_kiq_resume()
  [all …]

gfx_v10_0.c
  3808  adev->gfx.kiq[0].pmf = &gfx_v10_0_kiq_pm4_funcs;  in gfx_v10_0_set_kiq_pm4_funcs()
  4767  &adev->gfx.kiq[0].irq);  in gfx_v10_0_sw_init()
  4924  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);  in gfx_v10_0_sw_fini()
  6532  adev->gfx.kiq[0].ring.sched.ready = false;  in gfx_v10_0_cp_compute_enable()
  7006  (adev->doorbell_index.kiq * 2) << 2);  in gfx_v10_0_kiq_init_register()
  7045  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v10_0_kiq_init_queue()
  7046  memcpy_toio(mqd, adev->gfx.kiq[0].mqd_backup, sizeof(*mqd));  in gfx_v10_0_kiq_init_queue()
  7068  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v10_0_kiq_init_queue()
  7069  memcpy_fromio(adev->gfx.kiq[0].mqd_backup, mqd, sizeof(*mqd));  in gfx_v10_0_kiq_init_queue()
  7109  ring = &adev->gfx.kiq[0].ring;  in gfx_v10_0_kiq_resume()
  [all …]
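
gfx_v10_0.c (and gfx_v11_0.c below) perform the same backup/restore dance as gfx9, but with memcpy_toio()/memcpy_fromio(), since the live MQD sits in device-visible memory rather than plain kernel memory. The same sketch, adjusted; the v10_compute_mqd type is assumed from the gfx10 code.

    #include "amdgpu.h"

    /* Sketch of the IO-memory MQD copy in gfx_v10_0_kiq_init_queue(). */
    static void kiq_mqd_copy_sketch(struct amdgpu_device *adev,
    				struct v10_compute_mqd __iomem *mqd,
    				bool restore)
    {
    	void *backup = adev->gfx.kiq[0].mqd_backup;

    	if (!backup)
    		return;
    	if (restore)
    		memcpy_toio(mqd, backup, sizeof(*mqd));	/* reset path */
    	else
    		memcpy_fromio(backup, mqd, sizeof(*mqd));	/* first-init path */
    }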

amdgpu_amdkfd_gfx_v10_3.c
  280  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in hiq_mqd_load_v10_3()
  295  spin_lock(&adev->gfx.kiq[0].ring_lock);  in hiq_mqd_load_v10_3()
  322  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in hiq_mqd_load_v10_3()

amdgpu_amdkfd_gfx_v11.c
  265  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in hiq_mqd_load_v11()
  280  spin_lock(&adev->gfx.kiq[0].ring_lock);  in hiq_mqd_load_v11()
  307  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in hiq_mqd_load_v11()

vega10_reg_init.c
  60  adev->doorbell_index.kiq = AMDGPU_DOORBELL64_KIQ;  in vega10_doorbell_index_init()

amdgpu_doorbell.h
  52  uint32_t kiq;  member

vega20_reg_init.c
  60  adev->doorbell_index.kiq = AMDGPU_VEGA20_DOORBELL_KIQ;  in vega20_doorbell_index_init()
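
vega10_reg_init.c, vega20_reg_init.c, soc21.c, and soc24.c each pin adev->doorbell_index.kiq to an ASIC-specific constant at init, and the gfx blocks later turn that slot index into a byte offset when programming the MEC doorbell range. A composite sketch mixing the vega10 constant with the gfx_v8 register write purely for illustration; on gfx9 and newer the excerpts instead show (index * 2) << 2, since each 64-bit doorbell slot holds two 32-bit doorbells.

    #include "amdgpu.h"

    /* Illustrative composite of the doorbell-index flow; not one real
     * in-tree function. mmCP_MEC_DOORBELL_RANGE_LOWER is the VI-era
     * register name from the gfx_v8_0.c match. */
    static void kiq_doorbell_sketch(struct amdgpu_device *adev)
    {
    	/* per-ASIC init, e.g. vega10_doorbell_index_init() */
    	adev->doorbell_index.kiq = AMDGPU_DOORBELL64_KIQ;

    	/* gfx_v8_0_set_mec_doorbell_range(): slot index -> byte offset */
    	WREG32(mmCP_MEC_DOORBELL_RANGE_LOWER, adev->doorbell_index.kiq << 2);
    }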

gfx_v12_0.c
  310  if (adev->enable_mes && !adev->gfx.kiq[0].ring.sched.ready) {  in gfx_v12_0_kiq_unmap_queues()
  378  adev->gfx.kiq[0].pmf = &gfx_v12_0_kiq_pm4_funcs;  in gfx_v12_0_set_kiq_pm4_funcs()
  1545  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);  in gfx_v12_0_sw_fini()
  2713  adev->gfx.kiq[0].ring.sched.ready = enable;  in gfx_v12_0_cp_compute_enable()
  2872  (adev->doorbell_index.kiq * 2) << 2);  in gfx_v12_0_cp_set_doorbell_range()
  3239  (adev->doorbell_index.kiq * 2) << 2);  in gfx_v12_0_kiq_init_register()
  3343  ring = &adev->gfx.kiq[0].ring;  in gfx_v12_0_kiq_resume()
  4537  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in gfx_v12_0_ring_preempt_ib() local
  4538  struct amdgpu_ring *kiq_ring = &kiq->ring;  in gfx_v12_0_ring_preempt_ib()
  4544  if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)  in gfx_v12_0_ring_preempt_ib()
  [all …]

amdgpu_amdkfd_gfx_v10.c
  294  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in kgd_hiq_mqd_load()
  309  spin_lock(&adev->gfx.kiq[0].ring_lock);  in kgd_hiq_mqd_load()
  336  spin_unlock(&adev->gfx.kiq[0].ring_lock);  in kgd_hiq_mqd_load()

gfx_v11_0.c
  366  if (adev->enable_mes && !adev->gfx.kiq[0].ring.sched.ready) {  in gfx11_kiq_unmap_queues()
  434  adev->gfx.kiq[0].pmf = &gfx_v11_0_kiq_pm4_funcs;  in gfx_v11_0_set_kiq_pm4_funcs()
  1803  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);  in gfx_v11_0_sw_fini()
  3943  (adev->doorbell_index.kiq * 2) << 2);  in gfx_v11_0_cp_set_doorbell_range()
  4317  (adev->doorbell_index.kiq * 2) << 2);  in gfx_v11_0_kiq_init_register()
  4356  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v11_0_kiq_init_queue()
  4357  memcpy_toio(mqd, adev->gfx.kiq[0].mqd_backup, sizeof(*mqd));  in gfx_v11_0_kiq_init_queue()
  4379  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v11_0_kiq_init_queue()
  4380  memcpy_fromio(adev->gfx.kiq[0].mqd_backup, mqd, sizeof(*mqd));  in gfx_v11_0_kiq_init_queue()
  4420  ring = &adev->gfx.kiq[0].ring;  in gfx_v11_0_kiq_resume()
  [all …]

gfx_v8_0.c
  2067  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);  in gfx_v8_0_sw_fini()
  4308  adev->gfx.kiq[0].ring.sched.ready = false;  in gfx_v8_0_cp_compute_enable()
  4328  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;  in gfx_v8_0_kiq_kcq_enable()
  4614  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v8_0_kiq_init_queue()
  4615  memcpy(mqd, adev->gfx.kiq[0].mqd_backup, sizeof(struct vi_mqd_allocation));  in gfx_v8_0_kiq_init_queue()
  4638  if (adev->gfx.kiq[0].mqd_backup)  in gfx_v8_0_kiq_init_queue()
  4639  memcpy(adev->gfx.kiq[0].mqd_backup, mqd, sizeof(struct vi_mqd_allocation));  in gfx_v8_0_kiq_init_queue()
  4677  WREG32(mmCP_MEC_DOORBELL_RANGE_LOWER, adev->doorbell_index.kiq << 2);  in gfx_v8_0_set_mec_doorbell_range()
  4689  ring = &adev->gfx.kiq[0].ring;  in gfx_v8_0_kiq_resume()
  4753  ring = &adev->gfx.kiq[0].ring;  in gfx_v8_0_cp_test_all_rings()
  [all …]

amdgpu_amdkfd_gfx_v9.c
  305  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[inst].ring;  in kgd_gfx_v9_hiq_mqd_load()
  320  spin_lock(&adev->gfx.kiq[inst].ring_lock);  in kgd_gfx_v9_hiq_mqd_load()
  347  spin_unlock(&adev->gfx.kiq[inst].ring_lock);  in kgd_gfx_v9_hiq_mqd_load()

gfx_v7_0.c
  4960  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];  in gfx_v7_0_reset_kgq() local
  4961  struct amdgpu_ring *kiq_ring = &kiq->ring;  in gfx_v7_0_reset_kgq()
  4969  if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)  in gfx_v7_0_reset_kgq()
  4972  spin_lock_irqsave(&kiq->ring_lock, flags);  in gfx_v7_0_reset_kgq()
  4975  spin_unlock_irqrestore(&kiq->ring_lock, flags);  in gfx_v7_0_reset_kgq()
  4983  spin_unlock_irqrestore(&kiq->ring_lock, flags);  in gfx_v7_0_reset_kgq()
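
gfx_v7_0_reset_kgq() is the one caller in this set that takes the KIQ lock with spin_lock_irqsave(), and the two unlock sites at 4975 and 4983 suggest an early-exit path plus the normal one. A sketch of just that locking shape; the packet emission is the same unmap call as elsewhere, and the error codes are assumptions.

    #include "amdgpu.h"

    /* Sketch of the irqsave locking shape in gfx_v7_0_reset_kgq(). */
    static int kiq_reset_kgq_sketch(struct amdgpu_kiq *kiq,
    				struct amdgpu_ring *ring)
    {
    	struct amdgpu_ring *kiq_ring = &kiq->ring;
    	unsigned long flags;

    	if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)
    		return -EOPNOTSUPP;	/* assumed errno; not in the matches */

    	spin_lock_irqsave(&kiq->ring_lock, flags);
    	if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {
    		/* one of the two unlock sites in the matches */
    		spin_unlock_irqrestore(&kiq->ring_lock, flags);
    		return -ENOMEM;	/* assumed errno */
    	}
    	kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, 0, 0);
    	amdgpu_ring_commit(kiq_ring);	/* assumed from the usual ring API */
    	spin_unlock_irqrestore(&kiq->ring_lock, flags);
    	return 0;
    }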

soc24.c
  298  adev->doorbell_index.kiq = AMDGPU_NAVI10_DOORBELL_KIQ;  in soc24_init_doorbell_index()

amdgpu_gfx.h
  368  struct amdgpu_kiq kiq[AMDGPU_MAX_GC_INSTANCES];  member
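
Every field exercised by the matches above hangs off this per-GC-instance array in struct amdgpu_gfx. Below is a reconstruction of the amdgpu_kiq members visible in the excerpts, not the full in-tree definition; the kiq_pm4_funcs type name for pmf is an assumption.

    /* Reconstructed from usage in the excerpts; field order is arbitrary. */
    struct amdgpu_kiq_sketch {
    	u64			eop_gpu_addr;	/* EOP buffer GPU VA (amdgpu_gfx.c:323) */
    	struct amdgpu_bo	*eop_obj;	/* backing BO, freed in kiq_fini() */
    	spinlock_t		ring_lock;	/* serializes every KIQ submission */
    	struct amdgpu_ring	ring;		/* the KIQ ring itself */
    	struct amdgpu_irq_src	irq;		/* amdgpu_gfx.c:303 */
    	void			*mqd_backup;	/* MQD save area (gfx_v*_0.c) */
    	const struct kiq_pm4_funcs *pmf;	/* per-ASIC packet builders (assumed name) */
    };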

gmc_v12_0.c
  305  if ((adev->gfx.kiq[0].ring.sched.ready || adev->mes.ring[0].sched.ready) &&  in gmc_v12_0_flush_gpu_tlb()

gmc_v11_0.c
  237  if ((adev->gfx.kiq[0].ring.sched.ready || adev->mes.ring[0].sched.ready) &&  in gmc_v11_0_flush_gpu_tlb()

soc21.c
  487  adev->doorbell_index.kiq = AMDGPU_NAVI10_DOORBELL_KIQ;  in soc21_init_doorbell_index()

gmc_v10_0.c
  276  if (adev->gfx.kiq[0].ring.sched.ready && !adev->enable_mes &&  in gmc_v10_0_flush_gpu_tlb()
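
The three gmc_v1x matches are guards, not submissions: the TLB flush only goes through the KIQ (or, on gfx11/12, the MES ring) when the corresponding scheduler is ready, and gmc_v10_0 additionally refuses the KIQ path when MES is enabled. A sketch of the two guard shapes; the register-based fallback they protect is elided.

    #include "amdgpu.h"

    /* Sketch of the TLB-flush path guards in gmc_v10/v11/v12. */
    static bool kiq_flush_usable_sketch(struct amdgpu_device *adev, bool gfx10)
    {
    	if (gfx10)	/* gmc_v10_0: KIQ only, and never under MES */
    		return adev->gfx.kiq[0].ring.sched.ready &&
    		       !adev->enable_mes;

    	/* gmc_v11_0 / gmc_v12_0: either KIQ or the MES ring can carry it */
    	return adev->gfx.kiq[0].ring.sched.ready ||
    	       adev->mes.ring[0].sched.ready;
    }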