Lines matching refs:uvd (all hits fall in uvd_v6_0_* functions of the amdgpu driver)
67 (!adev->uvd.fw_version || adev->uvd.fw_version >= FW_1_130_16)); in uvd_v6_0_enc_support()
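The hit at line 67 is the tail of the encode-support predicate: encode rings are only exposed when the UVD firmware is new enough, or its version is unknown. A sketch of the whole check, where the asic_type bounds are assumptions and only the firmware clause comes from the hit:

	/* Sketch of uvd_v6_0_enc_support(); asic_type bounds are assumptions. */
	static bool uvd_v6_0_enc_support(struct amdgpu_device *adev)
	{
		return ((adev->asic_type >= CHIP_POLARIS10) &&
			(adev->asic_type <= CHIP_VEGAM) &&
			(!adev->uvd.fw_version || adev->uvd.fw_version >= FW_1_130_16));
	}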
95 if (ring == &adev->uvd.inst->ring_enc[0]) in uvd_v6_0_enc_ring_get_rptr()
125 if (ring == &adev->uvd.inst->ring_enc[0]) in uvd_v6_0_enc_ring_get_wptr()
156 if (ring == &adev->uvd.inst->ring_enc[0]) in uvd_v6_0_enc_ring_set_wptr()
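Lines 95-156 show the same pattern in all three encode-ring register accessors: ring_enc[0] and ring_enc[1] sit behind separate RPTR/WPTR register pairs. A sketch of one accessor; the second-ring register name is an assumption:

	/* Sketch of the two-ring register split seen at lines 95-156;
	 * mmUVD_RB_WPTR2 for the second ring is an assumption. */
	static uint64_t uvd_v6_0_enc_ring_get_wptr(struct amdgpu_ring *ring)
	{
		struct amdgpu_device *adev = ring->adev;

		if (ring == &adev->uvd.inst->ring_enc[0])
			return RREG32(mmUVD_RB_WPTR);
		else
			return RREG32(mmUVD_RB_WPTR2);
	}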
335 struct amdgpu_bo *bo = ring->adev->uvd.ib_bo; in uvd_v6_0_enc_ring_test_ib()
360 adev->uvd.num_uvd_inst = 1; in uvd_v6_0_early_init()
369 adev->uvd.num_enc_rings = 2; in uvd_v6_0_early_init()
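The two early_init hits (lines 360 and 369) fix the topology before any ring exists: one UVD instance, and two encode rings when encode is supported. A sketch of the hook; only the two assignments are from the hits, the guard and the set_*_funcs calls are assumptions based on the other hits in this listing:

	/* Sketch of uvd_v6_0_early_init(); guard and trailing calls assumed. */
	static int uvd_v6_0_early_init(void *handle)
	{
		struct amdgpu_device *adev = (struct amdgpu_device *)handle;

		adev->uvd.num_uvd_inst = 1;		/* line 360 */

		if (uvd_v6_0_enc_support(adev))
			adev->uvd.num_enc_rings = 2;	/* line 369 */

		uvd_v6_0_set_ring_funcs(adev);
		uvd_v6_0_set_enc_ring_funcs(adev);
		uvd_v6_0_set_irq_funcs(adev);

		return 0;
	}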
385 …id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_UVD_SYSTEM_MESSAGE, &adev->uvd.inst->irq); in uvd_v6_0_sw_init()
391 for (i = 0; i < adev->uvd.num_enc_rings; ++i) { in uvd_v6_0_sw_init()
392 …(adev, AMDGPU_IRQ_CLIENTID_LEGACY, i + VISLANDS30_IV_SRCID_UVD_ENC_GEN_PURP, &adev->uvd.inst->irq); in uvd_v6_0_sw_init()
403 for (i = 0; i < adev->uvd.num_enc_rings; ++i) in uvd_v6_0_sw_init()
404 adev->uvd.inst->ring_enc[i].funcs = NULL; in uvd_v6_0_sw_init()
406 adev->uvd.inst->irq.num_types = 1; in uvd_v6_0_sw_init()
407 adev->uvd.num_enc_rings = 0; in uvd_v6_0_sw_init()
412 ring = &adev->uvd.inst->ring; in uvd_v6_0_sw_init()
414 r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0, in uvd_v6_0_sw_init()
424 for (i = 0; i < adev->uvd.num_enc_rings; ++i) { in uvd_v6_0_sw_init()
425 ring = &adev->uvd.inst->ring_enc[i]; in uvd_v6_0_sw_init()
428 &adev->uvd.inst->irq, 0, in uvd_v6_0_sw_init()
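Lines 385-428 follow the usual amdgpu bring-up order in sw_init: register the IRQ sources first (one legacy client ID per source), then create the decode ring and one encode ring per num_enc_rings, all against the shared IRQ source. A sketch of the encode loop; the ring name and the trailing amdgpu_ring_init() arguments vary across kernel versions and are assumptions here:

	/* Sketch of the encode-ring loop at lines 424-428; ring naming and
	 * the trailing amdgpu_ring_init() arguments are assumptions. */
	for (i = 0; i < adev->uvd.num_enc_rings; ++i) {
		ring = &adev->uvd.inst->ring_enc[i];
		sprintf(ring->name, "uvd_enc%d", i);
		r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0,
				     AMDGPU_RING_PRIO_DEFAULT, NULL);
		if (r)
			return r;
	}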
448 for (i = 0; i < adev->uvd.num_enc_rings; ++i) in uvd_v6_0_sw_fini()
449 amdgpu_ring_fini(&adev->uvd.inst->ring_enc[i]); in uvd_v6_0_sw_fini()
465 struct amdgpu_ring *ring = &adev->uvd.inst->ring; in uvd_v6_0_hw_init()
505 for (i = 0; i < adev->uvd.num_enc_rings; ++i) { in uvd_v6_0_hw_init()
506 ring = &adev->uvd.inst->ring_enc[i]; in uvd_v6_0_hw_init()
535 cancel_delayed_work_sync(&adev->uvd.idle_work); in uvd_v6_0_hw_fini()
566 cancel_delayed_work_sync(&adev->uvd.idle_work); in uvd_v6_0_suspend()
611 lower_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v6_0_mc_resume()
613 upper_32_bits(adev->uvd.inst->gpu_addr)); in uvd_v6_0_mc_resume()
627 (AMDGPU_UVD_SESSION_SIZE * adev->uvd.max_handles); in uvd_v6_0_mc_resume()
635 WREG32(mmUVD_GP_SCRATCH4, adev->uvd.max_handles); in uvd_v6_0_mc_resume()
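The mc_resume hits (lines 611-635) program the UVD memory controller: the instance's 64-bit GPU address split over a LOW/HIGH register pair, a cache-region size derived from the per-session footprint times max_handles, and the handle count itself in a scratch register the firmware reads. A condensed sketch; the register names and the heap term do not appear in the truncated hits and are assumptions:

	/* Condensed sketch of uvd_v6_0_mc_resume(); register names and
	 * AMDGPU_UVD_HEAP_SIZE are assumptions, the values are from the hits. */
	WREG32(mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW,
	       lower_32_bits(adev->uvd.inst->gpu_addr));		/* line 611 */
	WREG32(mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH,
	       upper_32_bits(adev->uvd.inst->gpu_addr));		/* line 613 */

	size = AMDGPU_UVD_HEAP_SIZE +
	       (AMDGPU_UVD_SESSION_SIZE * adev->uvd.max_handles);	/* line 627 */
	WREG32(mmUVD_VCPU_CACHE_SIZE2, size);

	WREG32(mmUVD_GP_SCRATCH4, adev->uvd.max_handles);		/* line 635 */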
726 struct amdgpu_ring *ring = &adev->uvd.inst->ring; in uvd_v6_0_start()
866 ring = &adev->uvd.inst->ring_enc[0]; in uvd_v6_0_start()
873 ring = &adev->uvd.inst->ring_enc[1]; in uvd_v6_0_start()
1178 adev->uvd.inst->srbm_soft_reset = srbm_soft_reset; in uvd_v6_0_check_soft_reset()
1181 adev->uvd.inst->srbm_soft_reset = 0; in uvd_v6_0_check_soft_reset()
1190 if (!adev->uvd.inst->srbm_soft_reset) in uvd_v6_0_pre_soft_reset()
1202 if (!adev->uvd.inst->srbm_soft_reset) in uvd_v6_0_soft_reset()
1204 srbm_soft_reset = adev->uvd.inst->srbm_soft_reset; in uvd_v6_0_soft_reset()
1232 if (!adev->uvd.inst->srbm_soft_reset) in uvd_v6_0_post_soft_reset()
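Lines 1178-1232 show the soft-reset contract: check_soft_reset records which SRBM bits (if any) need toggling in inst->srbm_soft_reset, and the pre/soft/post hooks all return early when nothing was recorded. A sketch of the middle hook; the actual register toggle sequence is elided:

	/* Sketch of uvd_v6_0_soft_reset() per lines 1202-1204; the SRBM
	 * write/readback sequence is elided. */
	static int uvd_v6_0_soft_reset(void *handle)
	{
		struct amdgpu_device *adev = (struct amdgpu_device *)handle;
		u32 srbm_soft_reset;

		if (!adev->uvd.inst->srbm_soft_reset)
			return 0;
		srbm_soft_reset = adev->uvd.inst->srbm_soft_reset;

		/* toggle srbm_soft_reset bits in mmSRBM_SOFT_RESET, then clear */

		return 0;
	}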
1258 amdgpu_fence_process(&adev->uvd.inst->ring); in uvd_v6_0_process_interrupt()
1262 amdgpu_fence_process(&adev->uvd.inst->ring_enc[0]); in uvd_v6_0_process_interrupt()
1268 amdgpu_fence_process(&adev->uvd.inst->ring_enc[1]); in uvd_v6_0_process_interrupt()
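The three process_interrupt hits (lines 1258, 1262, 1268) fan fence completion out to the right ring. A sketch of the dispatch; the concrete src_id case values are assumptions tied to the VISLANDS30 IV source IDs registered in sw_init:

	/* Sketch of the dispatch in uvd_v6_0_process_interrupt(); the
	 * src_id case values are assumptions. */
	switch (entry->src_id) {
	case VISLANDS30_IV_SRCID_UVD_SYSTEM_MESSAGE:
		amdgpu_fence_process(&adev->uvd.inst->ring);		/* line 1258 */
		break;
	case VISLANDS30_IV_SRCID_UVD_ENC_GEN_PURP:
		amdgpu_fence_process(&adev->uvd.inst->ring_enc[0]);	/* line 1262 */
		break;
	case VISLANDS30_IV_SRCID_UVD_ENC_GEN_PURP + 1:
		amdgpu_fence_process(&adev->uvd.inst->ring_enc[1]);	/* line 1268 */
		break;
	}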
1633 adev->uvd.inst->ring.funcs = &uvd_v6_0_ring_vm_funcs; in uvd_v6_0_set_ring_funcs()
1636 adev->uvd.inst->ring.funcs = &uvd_v6_0_ring_phys_funcs; in uvd_v6_0_set_ring_funcs()
1645 for (i = 0; i < adev->uvd.num_enc_rings; ++i) in uvd_v6_0_set_enc_ring_funcs()
1646 adev->uvd.inst->ring_enc[i].funcs = &uvd_v6_0_enc_ring_vm_funcs; in uvd_v6_0_set_enc_ring_funcs()
1659 adev->uvd.inst->irq.num_types = adev->uvd.num_enc_rings + 1; in uvd_v6_0_set_irq_funcs()
1661 adev->uvd.inst->irq.num_types = 1; in uvd_v6_0_set_irq_funcs()
1663 adev->uvd.inst->irq.funcs = &uvd_v6_0_irq_funcs; in uvd_v6_0_set_irq_funcs()
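The final hits (lines 1659-1663) size the IRQ source to match the ring count: one interrupt type for the decode ring plus one per encode ring when encode is enabled. A sketch; the guard condition is an assumption:

	/* Sketch of uvd_v6_0_set_irq_funcs() per lines 1659-1663; the
	 * encode guard is an assumption. */
	static void uvd_v6_0_set_irq_funcs(struct amdgpu_device *adev)
	{
		if (uvd_v6_0_enc_support(adev))
			adev->uvd.inst->irq.num_types = adev->uvd.num_enc_rings + 1;
		else
			adev->uvd.inst->irq.num_types = 1;

		adev->uvd.inst->irq.funcs = &uvd_v6_0_irq_funcs;
	}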