Cross-references for amdgpu_ring_alloc() in /linux/drivers/gpu/drm/amd/amdgpu/:

amdgpu_gfx.c
    522  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size *  in amdgpu_gfx_disable_kcq()
    575  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size *  in amdgpu_gfx_disable_kgq()
    626  r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->set_resources_size);  in amdgpu_gfx_mes_enable_kcq()
    687  r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size *  in amdgpu_gfx_enable_kcq()
    745  r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size *  in amdgpu_gfx_enable_kgq()
   1048  r = amdgpu_ring_alloc(ring, 32);  in amdgpu_kiq_rreg()
   1117  r = amdgpu_ring_alloc(ring, 32);  in amdgpu_kiq_wreg()

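The amdgpu_gfx.c sites size one reservation for a whole batch of KIQ packets: per-packet dword counts come from the generation-specific packet-manager functions (kiq->pmf), multiplied by the number of rings being mapped or unmapped. A minimal sketch of that pattern, assuming the kiq_map_queues callback and the multi-instance kiq[] layout of recent kernels; example_map_all_kcqs is a made-up name, and the extra set_resources_size dwords that amdgpu_gfx_enable_kcq() also reserves are omitted:

#include "amdgpu.h"

/* Illustrative sketch, not a verbatim excerpt from amdgpu_gfx.c. */
static int example_map_all_kcqs(struct amdgpu_device *adev)
{
	struct amdgpu_kiq *kiq = &adev->gfx.kiq[0];	/* first GC instance */
	struct amdgpu_ring *kiq_ring = &kiq->ring;
	int i, r;

	/* One reservation sized for a map_queues packet per compute ring. */
	r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size *
			      adev->gfx.num_compute_rings);
	if (r)
		return r;

	for (i = 0; i < adev->gfx.num_compute_rings; i++)
		kiq->pmf->kiq_map_queues(kiq_ring, &adev->gfx.compute_ring[i]);

	/* Publish all packets with a single write-pointer update. */
	amdgpu_ring_commit(kiq_ring);
	return 0;
}
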
amdgpu_ring_mux.c
     66  amdgpu_ring_alloc(real_ring, (ring->ring_size >> 2) + end - start);  in amdgpu_ring_mux_copy_pkt_from_sw_ring()
     71  amdgpu_ring_alloc(real_ring, end - start);  in amdgpu_ring_mux_copy_pkt_from_sw_ring()

amdgpu_ib.c
    187  r = amdgpu_ring_alloc(ring, alloc_size);  in amdgpu_ib_schedule()

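The amdgpu_ib_schedule() site pairs the reservation with a rollback: if packet construction fails after amdgpu_ring_alloc() succeeds, amdgpu_ring_undo() restores the saved write pointer so no partial packet is exposed to the engine. A condensed, hypothetical version of that error path (example_schedule and example_emit_ib_and_fence are placeholders, not driver functions):

#include "amdgpu.h"

/* Placeholder for whatever packet emission might fail mid-stream. */
static int example_emit_ib_and_fence(struct amdgpu_ring *ring);

static int example_schedule(struct amdgpu_ring *ring, unsigned int ndw)
{
	int r;

	r = amdgpu_ring_alloc(ring, ndw);
	if (r)
		return r;

	r = example_emit_ib_and_fence(ring);
	if (r) {
		/* Nothing was committed yet: restore the saved write
		 * pointer so the engine never sees the partial packet. */
		amdgpu_ring_undo(ring);
		return r;
	}

	amdgpu_ring_commit(ring);
	return 0;
}
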
amdgpu_gmc.c
    722  r = amdgpu_ring_alloc(ring, ndw);  in amdgpu_gmc_flush_gpu_tlb_pasid()
    772  amdgpu_ring_alloc(ring, 32);  in amdgpu_gmc_fw_reg_write_reg_wait()

amdgpu_ring.h
    342  int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned ndw);

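Every call site in this listing follows the same contract behind the prototype above: reserve ndw dwords, write exactly that many, then publish them. A minimal sketch of the reserve -> write -> commit sequence; the function name and the zero-filled payload are placeholders, a real caller emits PM4/SDMA packets:

#include "amdgpu.h"

static int example_emit_three_dwords(struct amdgpu_ring *ring)
{
	int r;

	/* Reserve 3 dwords of ring space; returns an error on failure. */
	r = amdgpu_ring_alloc(ring, 3);
	if (r)
		return r;

	/* Fill exactly the dwords that were reserved. */
	amdgpu_ring_write(ring, 0);
	amdgpu_ring_write(ring, 0);
	amdgpu_ring_write(ring, 0);

	/* Bump the hardware write pointer so the engine fetches the data. */
	amdgpu_ring_commit(ring);
	return 0;
}
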
amdgpu_amdkfd_gfx_v10_3.c
    296  r = amdgpu_ring_alloc(kiq_ring, 7);  in hiq_mqd_load_v10_3()

amdgpu_ring.c
     81  int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned int ndw)  in amdgpu_ring_alloc() (function definition)

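Paraphrased from memory of the definition at amdgpu_ring.c:81 (exact details may differ between kernel versions, so treat this as a reconstruction, not an excerpt): the function allocates no memory at all; it is pure bookkeeping on the pre-allocated ring buffer, which also explains why failure returns -ENOMEM only when a single submission would exceed the ring's capacity:

#include "amdgpu.h"

int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned int ndw)
{
	/* Round up to the ring's alignment so commit-time padding fits. */
	ndw = (ndw + ring->funcs->align_mask) & ~ring->funcs->align_mask;

	/* Refuse requests larger than one submission may use. */
	if (WARN_ON_ONCE(ndw > ring->max_dw))
		return -ENOMEM;

	ring->count_dw = ndw;		/* dwords the caller may still write */
	ring->wptr_old = ring->wptr;	/* saved for amdgpu_ring_undo() */

	if (ring->funcs->begin_use)
		ring->funcs->begin_use(ring);

	return 0;
}
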
gfx_v9_0.c
   1176  r = amdgpu_ring_alloc(ring, 3);  in gfx_v9_0_ring_test_ring()
   3301  r = amdgpu_ring_alloc(ring, gfx_v9_0_get_csb_size(adev) + 4 + 3);  in gfx_v9_0_cp_gfx_start()
   4208  amdgpu_ring_alloc(ring, 32);  in gfx_v9_0_kiq_read_clock()
   4579  r = amdgpu_ring_alloc(ring, 7);  in gfx_v9_0_do_edc_gds_workarounds()
   5749  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {  in gfx_v9_0_ring_preempt_ib()
   5758  amdgpu_ring_alloc(ring, 13);  in gfx_v9_0_ring_preempt_ib()
   7241  if (amdgpu_ring_alloc(kiq_ring, 5)) {  in gfx_v9_0_reset_kgq()
   7257  if (amdgpu_ring_alloc(ring, 7 + 7 + 5))  in gfx_v9_0_reset_kgq()
   7286  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {  in gfx_v9_0_reset_kcq()
   7337  r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);  in gfx_v9_0_reset_kcq()

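The many *_ring_test_ring() sites in this listing (3 to 20 dwords depending on the engine) all share one shape: seed a scratch register, emit a small packet that overwrites it, and poll until the engine executes the write. A sketch of that shape, where EXAMPLE_SCRATCH_REG and example_emit_scratch_write() are placeholders because each IP version uses its own register offset and packet encoding:

#include "amdgpu.h"

/* Placeholder for the per-generation register-write packet. */
static void example_emit_scratch_write(struct amdgpu_ring *ring,
				       u32 reg, u32 val);

static int example_ring_test(struct amdgpu_ring *ring)
{
	struct amdgpu_device *adev = ring->adev;
	unsigned int i;
	int r;

	WREG32(EXAMPLE_SCRATCH_REG, 0xCAFEDEAD);	/* known seed value */

	r = amdgpu_ring_alloc(ring, 3);	/* 3 dwords, as at gfx_v9_0.c:1176 */
	if (r)
		return r;

	example_emit_scratch_write(ring, EXAMPLE_SCRATCH_REG, 0xDEADBEEF);
	amdgpu_ring_commit(ring);

	/* Busy-wait until the engine has consumed the packet. */
	for (i = 0; i < adev->usec_timeout; i++) {
		if (RREG32(EXAMPLE_SCRATCH_REG) == 0xDEADBEEF)
			return 0;
		udelay(1);
	}
	return -ETIMEDOUT;
}
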
mes_v12_0.c
    182  r = amdgpu_ring_alloc(ring, (size + sizeof(mes_status_pkt)) / 4);  in mes_v12_0_submit_pkt_and_poll_completion()
   1190  r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);  in mes_v12_0_kiq_enable_queue()

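Note that ndw counts 32-bit dwords while the MES packet structs are sized in bytes, hence the division by four at mes_v12_0.c:182 and mes_v11_0.c:196 below. A hypothetical helper making that conversion explicit:

#include <linux/types.h>

/* Hypothetical helper: convert a byte-sized packet into the dword
 * count that amdgpu_ring_alloc() expects. */
static inline unsigned int example_bytes_to_ndw(size_t bytes)
{
	return bytes / sizeof(u32);	/* 4 bytes per ring dword */
}
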
sdma_v7_0.c
    938  r = amdgpu_ring_alloc(ring, 5);  in sdma_v7_0_ring_test_ring()
   1425  r = amdgpu_ring_alloc(ring, 10);  in sdma_v7_0_ring_preempt_ib()

amdgpu_amdkfd_gfx_v11.c
    281  r = amdgpu_ring_alloc(kiq_ring, 7);  in hiq_mqd_load_v11()

amdgpu_amdkfd.c
    856  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {  in amdgpu_amdkfd_unmap_hiq()

mes_v11_0.c
    196  r = amdgpu_ring_alloc(ring, (size + sizeof(mes_status_pkt)) / 4);  in mes_v11_0_submit_pkt_and_poll_completion()
   1256  r = amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size);  in mes_v11_0_kiq_enable_queue()

sdma_v6_0.c
    941  r = amdgpu_ring_alloc(ring, 5);  in sdma_v6_0_ring_test_ring()
   1479  amdgpu_ring_alloc(ring, 10);  in sdma_v6_0_ring_preempt_ib()

sdma_v5_2.c
    928  r = amdgpu_ring_alloc(ring, 20);  in sdma_v5_2_ring_test_ring()
   1578  amdgpu_ring_alloc(ring, 10);  in sdma_v5_2_ring_preempt_ib()

amdgpu_amdkfd_gfx_v10.c
    310  r = amdgpu_ring_alloc(kiq_ring, 7);  in kgd_hiq_mqd_load()

sdma_v5_0.c
   1071  r = amdgpu_ring_alloc(ring, 20);  in sdma_v5_0_ring_test_ring()
   1677  amdgpu_ring_alloc(ring, 10);  in sdma_v5_0_ring_preempt_ib()

amdgpu_umsch_mm.c
    496  if (amdgpu_ring_alloc(ring, ndws))  in amdgpu_umsch_mm_submit_pkt()

gfx_v7_0.c
   2032  r = amdgpu_ring_alloc(ring, 3);  in gfx_v7_0_ring_test_ring()
   2465  r = amdgpu_ring_alloc(ring, gfx_v7_0_get_csb_size(adev) + 8);  in gfx_v7_0_cp_gfx_start()
   4968  if (amdgpu_ring_alloc(kiq_ring, 5)) {  in gfx_v7_0_reset_kgq()
   4983  if (amdgpu_ring_alloc(ring, 7 + 12 + 5))  in gfx_v7_0_reset_kgq()

gfx_v8_0.c
    846  r = amdgpu_ring_alloc(ring, 3);  in gfx_v8_0_ring_test_ring()
   4150  r = amdgpu_ring_alloc(ring, gfx_v8_0_get_csb_size(adev) + 4);  in gfx_v8_0_cp_gfx_start()
   4333  r = amdgpu_ring_alloc(kiq_ring, (8 * adev->gfx.num_compute_rings) + 8);  in gfx_v8_0_kiq_kcq_enable()
   4808  r = amdgpu_ring_alloc(kiq_ring, 6 * adev->gfx.num_compute_rings);  in gfx_v8_0_kcq_disable()
   6914  if (amdgpu_ring_alloc(kiq_ring, 5)) {  in gfx_v8_0_reset_kgq()
   6929  if (amdgpu_ring_alloc(ring, 7 + 12 + 5))  in gfx_v8_0_reset_kgq()

amdgpu_amdkfd_gfx_v9.c
    321  r = amdgpu_ring_alloc(kiq_ring, 7);  in kgd_gfx_v9_hiq_mqd_load()

sdma_v3_0.c
    820  r = amdgpu_ring_alloc(ring, 5);  in sdma_v3_0_ring_test_ring()

gfx_v10_0.c
   3960  r = amdgpu_ring_alloc(ring, 3);  in gfx_v10_0_ring_test_ring()
   6248  r = amdgpu_ring_alloc(ring, gfx_v10_0_get_csb_size(adev) + 4);  in gfx_v10_0_cp_gfx_start()
   6298  r = amdgpu_ring_alloc(ring, 2);  in gfx_v10_0_cp_gfx_start()
   8795  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {  in gfx_v10_0_ring_preempt_ib()
   9462  if (amdgpu_ring_alloc(kiq_ring, 5 + 7 + 7 + kiq->pmf->map_queues_size)) {  in gfx_v10_0_reset_kgq()
   9528  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {  in gfx_v10_0_reset_kcq()
   9579  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->map_queues_size)) {  in gfx_v10_0_reset_kcq()

gfx_v11_0.c
    515  r = amdgpu_ring_alloc(ring, 5);  in gfx_v11_0_ring_test_ring()
   3489  r = amdgpu_ring_alloc(ring, gfx_v11_0_get_csb_size(adev));  in gfx_v11_0_cp_gfx_start()
   3534  r = amdgpu_ring_alloc(ring, 2);  in gfx_v11_0_cp_gfx_start()
   5993  if (amdgpu_ring_alloc(kiq_ring, kiq->pmf->unmap_queues_size)) {  in gfx_v11_0_ring_preempt_ib()

vcn_v2_0.c
   1777  r = amdgpu_ring_alloc(ring, 4);  in vcn_v2_0_dec_ring_test_ring()