Searched refs:kiq (Results 1 – 14 of 14) sorted by relevance
| /linux/drivers/gpu/drm/amd/amdgpu/ |
| H A D | amdgpu_gmc.c |
    721  struct amdgpu_ring *ring = &adev->gfx.kiq[inst].ring;    in amdgpu_gmc_flush_gpu_tlb_pasid()
    722  struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];    in amdgpu_gmc_flush_gpu_tlb_pasid() local
    757  ndw = kiq->pmf->invalidate_tlbs_size + 8;    in amdgpu_gmc_flush_gpu_tlb_pasid()
    760  ndw += kiq->pmf->invalidate_tlbs_size;    in amdgpu_gmc_flush_gpu_tlb_pasid()
    763  ndw += kiq->pmf->invalidate_tlbs_size;    in amdgpu_gmc_flush_gpu_tlb_pasid()
    765  spin_lock(&adev->gfx.kiq[inst].ring_lock);    in amdgpu_gmc_flush_gpu_tlb_pasid()
    768  spin_unlock(&adev->gfx.kiq[inst].ring_lock);    in amdgpu_gmc_flush_gpu_tlb_pasid()
    772  kiq->pmf->kiq_invalidate_tlbs(ring, pasid, 2, all_hub);    in amdgpu_gmc_flush_gpu_tlb_pasid()
    775  kiq->pmf->kiq_invalidate_tlbs(ring, pasid, 0, all_hub);    in amdgpu_gmc_flush_gpu_tlb_pasid()
    777  kiq->pmf->kiq_invalidate_tlbs(ring, pasid, flush_type, all_hub);    in amdgpu_gmc_flush_gpu_tlb_pasid()
    [all …]
|
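The amdgpu_gmc.c hits trace amdgpu_gmc_flush_gpu_tlb_pasid(): it sizes a KIQ ring allocation from kiq->pmf->invalidate_tlbs_size, takes the per-instance ring_lock, and emits the invalidation through the kiq_invalidate_tlbs() packet callback (the call sites at 772/775/777 differ only in the flush type, covering ASIC-specific quirks). Below is a condensed sketch of that submission pattern, assuming the amdgpu driver's internal headers; the fence emit/wait after the commit and the real error paths are omitted, so this is not the kernel function itself.

    #include "amdgpu.h"    /* driver-internal header; assumes an in-tree amdgpu build context */

    /* Sketch only: condensed from the search hits above. */
    static int kiq_flush_tlb_pasid_sketch(struct amdgpu_device *adev, int inst,
                                          uint16_t pasid, uint32_t flush_type,
                                          bool all_hub)
    {
        struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];
        struct amdgpu_ring *ring = &kiq->ring;
        unsigned int ndw;
        int r;

        /* Reserve room for the invalidate packet(s) plus some padding. */
        ndw = kiq->pmf->invalidate_tlbs_size + 8;

        spin_lock(&adev->gfx.kiq[inst].ring_lock);
        r = amdgpu_ring_alloc(ring, ndw);
        if (r) {
            spin_unlock(&adev->gfx.kiq[inst].ring_lock);
            return r;
        }

        /* Emit the per-PASID invalidation; the real path also emits a fence
         * here and waits on it before returning. */
        kiq->pmf->kiq_invalidate_tlbs(ring, pasid, flush_type, all_hub);
        amdgpu_ring_commit(ring);
        spin_unlock(&adev->gfx.kiq[inst].ring_lock);

        return 0;
    }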
| H A D | amdgpu_amdkfd.c |
    816  struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];    in amdgpu_amdkfd_unmap_hiq() local
    817  struct amdgpu_ring *kiq_ring = &kiq->ring;    in amdgpu_amdkfd_unmap_hiq()
    822  if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)    in amdgpu_amdkfd_unmap_hiq()
    842  spin_lock(&kiq->ring_lock);    in amdgpu_amdkfd_unmap_hiq()
    844  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {    in amdgpu_amdkfd_unmap_hiq()
    845  spin_unlock(&kiq->ring_lock);    in amdgpu_amdkfd_unmap_hiq()
    850  kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, 0, 0);    in amdgpu_amdkfd_unmap_hiq()
    861  spin_unlock(&kiq->ring_lock);    in amdgpu_amdkfd_unmap_hiq()
|
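amdgpu_amdkfd_unmap_hiq() follows the same lock/allocate/emit shape to tear a queue down through the KIQ, after first checking that the ASIC wired up the packet-manager callbacks. A minimal sketch of that guard-and-submit sequence; the signature is simplified, the fence wait and KFD bookkeeping of the real function are left out, and the error codes are illustrative.

    #include "amdgpu.h"    /* driver-internal header; in-tree build context assumed */

    /* Sketch of the unmap path seen in amdgpu_amdkfd.c above; 'ring' is the
     * queue being unmapped. */
    static int kiq_unmap_queue_sketch(struct amdgpu_device *adev, uint32_t inst,
                                      struct amdgpu_ring *ring)
    {
        struct amdgpu_kiq *kiq = &adev->gfx.kiq[inst];
        struct amdgpu_ring *kiq_ring = &kiq->ring;

        /* Bail out if this ASIC did not register KIQ packet-manager callbacks. */
        if (!kiq->pmf || !kiq->pmf->kiq_unmap_queues)
            return -EINVAL;

        spin_lock(&kiq->ring_lock);

        if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {
            spin_unlock(&kiq->ring_lock);
            return -ENOMEM;
        }

        /* RESET_QUEUES is the unmap action used at line 850 above. */
        kiq->pmf->kiq_unmap_queues(kiq_ring, ring, RESET_QUEUES, 0, 0);
        amdgpu_ring_commit(kiq_ring);

        spin_unlock(&kiq->ring_lock);
        return 0;
    }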
| H A D | mes_v11_0.c |
    1281  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];    in mes_v11_0_kiq_enable_queue() local
    1282  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;    in mes_v11_0_kiq_enable_queue()
    1285  if (!kiq->pmf || !kiq->pmf->kiq_map_queues)    in mes_v11_0_kiq_enable_queue()
    1288  r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);    in mes_v11_0_kiq_enable_queue()
    1294  kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring[0]);    in mes_v11_0_kiq_enable_queue()
    1306  ring = &adev->gfx.kiq[0].ring;    in mes_v11_0_queue_init()
    1361  spin_lock_init(&adev->gfx.kiq[0].ring_lock);    in mes_v11_0_kiq_ring_init()
    1363  ring = &adev->gfx.kiq[0].ring;    in mes_v11_0_kiq_ring_init()
    1389  ring = &adev->gfx.kiq[0].ring;    in mes_v11_0_mqd_sw_init()
    1496  amdgpu_bo_free_kernel(&adev->gfx.kiq[0].ring.mqd_obj,    in mes_v11_0_sw_fini()
    [all …]
|
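mes_v11_0_kiq_enable_queue() uses the map_queues callback the same way, this time to get the MES scheduler ring itself mapped onto hardware; mes_v12_0.c below repeats the sequence on the v12 interface. Roughly, per the excerpts (a sketch; the ring test and error logging of the real function are omitted):

    #include "amdgpu.h"    /* driver-internal header assumed */

    /* Sketch of the enable-queue step from mes_v11_0.c / mes_v12_0.c above. */
    static int mes_kiq_enable_queue_sketch(struct amdgpu_device *adev)
    {
        struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];
        struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;
        int r;

        if (!kiq->pmf || !kiq->pmf->kiq_map_queues)
            return -EINVAL;

        /* Reserve space on the KIQ ring for a MAP_QUEUES packet. */
        r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);
        if (r)
            return r;

        /* Ask the KIQ to map the MES scheduler ring onto the hardware. */
        kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring[0]);
        amdgpu_ring_commit(kiq_ring);

        return 0;
    }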
| H A D | mes_v12_0.c |
    1445  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];    in mes_v12_0_kiq_enable_queue() local
    1446  struct amdgpu_ring *kiq_ring = &adev->gfx.kiq[0].ring;    in mes_v12_0_kiq_enable_queue()
    1449  if (!kiq->pmf || !kiq->pmf->kiq_map_queues)    in mes_v12_0_kiq_enable_queue()
    1452  r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);    in mes_v12_0_kiq_enable_queue()
    1458  kiq->pmf->kiq_map_queues(kiq_ring, &adev->mes.ring[0]);    in mes_v12_0_kiq_enable_queue()
    1475  ring = &adev->gfx.kiq[0].ring;    in mes_v12_0_queue_init()
    1550  spin_lock_init(&adev->gfx.kiq[0].ring_lock);    in mes_v12_0_kiq_ring_init()
    1552  ring = &adev->gfx.kiq[0].ring;    in mes_v12_0_kiq_ring_init()
    1578  ring = &adev->gfx.kiq[0].ring;    in mes_v12_0_mqd_sw_init()
    1679  amdgpu_bo_free_kernel(&adev->gfx.kiq[0].ring.mqd_obj,    in mes_v12_0_sw_fini()
    [all …]
|
| H A D | vega10_reg_init.c | 60 adev->doorbell_index.kiq = AMDGPU_DOORBELL64_KIQ; in vega10_doorbell_index_init()
|
| H A D | amdgpu_doorbell.h | 52 uint32_t kiq; member
|
| H A D | vega20_reg_init.c | 60 adev->doorbell_index.kiq = AMDGPU_VEGA20_DOORBELL_KIQ; in vega20_doorbell_index_init()
|
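vega10_reg_init.c and vega20_reg_init.c (and aqua_vanjaram.c further down) each assign a per-ASIC value to the doorbell_index.kiq member declared in amdgpu_doorbell.h. The kernel keeps these assignments in separate *_doorbell_index_init() callbacks; the switch below is only an illustration that collapses the assignments visible in these hits into one place, and enum kiq_doorbell_asic with its values is hypothetical.

    #include "amdgpu.h"    /* driver-internal header assumed */

    /* Illustration only, not an amdgpu function or dispatch mechanism. */
    enum kiq_doorbell_asic { ASIC_VEGA10, ASIC_VEGA20, ASIC_AQUA_VANJARAM };

    static void doorbell_index_kiq_init_sketch(struct amdgpu_device *adev,
                                               enum kiq_doorbell_asic asic)
    {
        switch (asic) {
        case ASIC_VEGA10:
            adev->doorbell_index.kiq = AMDGPU_DOORBELL64_KIQ;
            break;
        case ASIC_VEGA20:
            adev->doorbell_index.kiq = AMDGPU_VEGA20_DOORBELL_KIQ;
            break;
        case ASIC_AQUA_VANJARAM:
            adev->doorbell_index.kiq = AMDGPU_DOORBELL_LAYOUT1_KIQ_START;
            break;
        }
    }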
| H A D | gfx_v12_0.c |
    357   if (adev->enable_mes && !adev->gfx.kiq[0].ring.sched.ready) {    in gfx_v12_0_kiq_unmap_queues()
    425   adev->gfx.kiq[0].pmf = &gfx_v12_0_kiq_pm4_funcs;    in gfx_v12_0_set_kiq_pm4_funcs()
    1638  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);    in gfx_v12_0_sw_fini()
    2799  adev->gfx.kiq[0].ring.sched.ready = enable;    in gfx_v12_0_cp_compute_enable()
    2958  (adev->doorbell_index.kiq * 2) << 2);    in gfx_v12_0_cp_set_doorbell_range()
    3323  (adev->doorbell_index.kiq * 2) << 2);    in gfx_v12_0_kiq_init_register()
    3424  gfx_v12_0_kiq_init_queue(&adev->gfx.kiq[0].ring);    in gfx_v12_0_kiq_resume()
    3425  adev->gfx.kiq[0].ring.sched.ready = true;    in gfx_v12_0_kiq_resume()
    4590  struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];    in gfx_v12_0_ring_preempt_ib() local
    4591  struct amdgpu_ring *kiq_ring = &kiq->ring;    in gfx_v12_0_ring_preempt_ib()
    [all …]
|
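The gfx_v12_0.c hits (and the matching gfx_v11_0.c lines) turn doorbell_index.kiq into the value programmed into the CP doorbell range registers as (index * 2) << 2. Reading that arithmetic as "64-bit doorbell slot -> 32-bit dword index -> byte offset" is an interpretation, not something the excerpts state; a small sketch of just that conversion:

    #include <linux/types.h>

    /* Hypothetical helper, not an amdgpu function: mirrors the expression
     * "(adev->doorbell_index.kiq * 2) << 2" seen above. */
    static inline u32 kiq_doorbell_byte_offset(u32 doorbell64_index)
    {
        u32 dword_index = doorbell64_index * 2;  /* two dwords per 64-bit doorbell slot */

        return dword_index << 2;                 /* dword index -> byte offset */
    }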
| H A D | gfx_v11_0.c |
    418   if (adev->enable_mes && !adev->gfx.kiq[0].ring.sched.ready) {    in gfx11_kiq_unmap_queues()
    486   adev->gfx.kiq[0].pmf = &gfx_v11_0_kiq_pm4_funcs;    in gfx_v11_0_set_kiq_pm4_funcs()
    1922  amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq[0].ring);    in gfx_v11_0_sw_fini()
    4063  (adev->doorbell_index.kiq * 2) << 2);    in gfx_v11_0_cp_set_doorbell_range()
    4446  (adev->doorbell_index.kiq * 2) << 2);    in gfx_v11_0_kiq_init_register()
    4485  if (adev->gfx.kiq[0].mqd_backup)    in gfx_v11_0_kiq_init_queue()
    4486  memcpy_toio(mqd, adev->gfx.kiq[0].mqd_backup, sizeof(*mqd));    in gfx_v11_0_kiq_init_queue()
    4508  if (adev->gfx.kiq[0].mqd_backup)    in gfx_v11_0_kiq_init_queue()
    4509  memcpy_fromio(adev->gfx.kiq[0].mqd_backup, mqd, sizeof(*mqd));    in gfx_v11_0_kiq_init_queue()
    4546  gfx_v11_0_kiq_init_queue(&adev->gfx.kiq[0].ring);    in gfx_v11_0_kiq_resume()
    [all …]
|
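gfx_v11_0_kiq_init_queue() either replays a saved KIQ MQD image into the VRAM-resident MQD with memcpy_toio() (when the queue is being brought back, e.g. after a reset) or programs it fresh and then snapshots it with memcpy_fromio() for later restores. A stripped-down sketch of just that branch; the MQD field programming and ring-pointer reset are elided, the 'restore' flag stands in for the reset/resume check in the real code, and struct v11_compute_mqd is assumed as the MQD type.

    #include "amdgpu.h"        /* driver-internal headers assumed */
    #include "v11_structs.h"

    /* Sketch of the MQD backup/restore branch from gfx_v11_0_kiq_init_queue().
     * 'mqd' points at the MQD in VRAM (I/O memory), hence the *_io copies. */
    static void kiq_mqd_init_sketch(struct amdgpu_device *adev,
                                    struct v11_compute_mqd *mqd, bool restore)
    {
        if (restore) {
            /* Reset/resume: replay the previously saved MQD image. */
            if (adev->gfx.kiq[0].mqd_backup)
                memcpy_toio(mqd, adev->gfx.kiq[0].mqd_backup, sizeof(*mqd));
        } else {
            /* First init: program the MQD, then keep a CPU copy for later. */
            /* ... MQD field setup and KIQ register init elided ... */
            if (adev->gfx.kiq[0].mqd_backup)
                memcpy_fromio(adev->gfx.kiq[0].mqd_backup, mqd, sizeof(*mqd));
        }
    }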
| H A D | gmc_v12_0.c | 331 if ((adev->gfx.kiq[0].ring.sched.ready || adev->mes.ring[0].sched.ready) && in gmc_v12_0_flush_gpu_tlb()
|
| H A D | gmc_v11_0.c | 263 if ((adev->gfx.kiq[0].ring.sched.ready || adev->mes.ring[0].sched.ready) && in gmc_v11_0_flush_gpu_tlb()
|
| H A D | gmc_v10_0.c | 275 if (adev->gfx.kiq[0].ring.sched.ready && !adev->enable_mes && in gmc_v10_0_flush_gpu_tlb()
|
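The gmc_v10/v11/v12 flush_gpu_tlb hits all gate the packet-based TLB flush on whether the KIQ ring (or, on v11/v12, the MES ring) is actually scheduling yet; gmc_v10_0.c additionally requires MES to be disabled, and gmc_v9_0.c below applies the same check per KIQ instance. When the rings are not ready (early init, reset), the flush falls back to direct register writes. A schematic of that decision only; the excerpts end mid-condition ("... &&"), so the remaining checks and both branch bodies are not reproduced here, and the function name is a placeholder.

    #include "amdgpu.h"    /* driver-internal header assumed */

    /* Schematic of the "use KIQ only if it is up" gate from the gmc_v1x hits. */
    static void gmc_flush_gpu_tlb_sketch(struct amdgpu_device *adev, uint32_t vmid,
                                         uint32_t vmhub, uint32_t flush_type)
    {
        bool kiq_or_mes_ready = adev->gfx.kiq[0].ring.sched.ready ||
                                adev->mes.ring[0].sched.ready;

        if (kiq_or_mes_ready /* && further checks cut off in the excerpts */) {
            /* Packet path: submit the invalidation through the KIQ/MES ring. */
            /* ... */
            return;
        }

        /* Fallback path: write the VM invalidate registers directly over MMIO. */
        /* ... */
    }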
| H A D | aqua_vanjaram.c | 41 adev->doorbell_index.kiq = AMDGPU_DOORBELL_LAYOUT1_KIQ_START; in aqua_vanjaram_doorbell_index_init()
|
| H A D | gmc_v9_0.c | 859 if (adev->gfx.kiq[inst].ring.sched.ready && in gmc_v9_0_flush_gpu_tlb()
|