Lines Matching refs:vce in drivers/gpu/drm/amd/amdgpu/vce_v3_0.c

83 	if (adev->vce.harvest_config == 0 || in vce_v3_0_ring_get_rptr()
84 	    adev->vce.harvest_config == AMDGPU_VCE_HARVEST_VCE1) in vce_v3_0_ring_get_rptr()
86 else if (adev->vce.harvest_config == AMDGPU_VCE_HARVEST_VCE0) in vce_v3_0_ring_get_rptr()
115 if (adev->vce.harvest_config == 0 || in vce_v3_0_ring_get_wptr()
116 	    adev->vce.harvest_config == AMDGPU_VCE_HARVEST_VCE1) in vce_v3_0_ring_get_wptr()
118 else if (adev->vce.harvest_config == AMDGPU_VCE_HARVEST_VCE0) in vce_v3_0_ring_get_wptr()
146 if (adev->vce.harvest_config == 0 || in vce_v3_0_ring_set_wptr()
147 	    adev->vce.harvest_config == AMDGPU_VCE_HARVEST_VCE1) in vce_v3_0_ring_set_wptr()
149 else if (adev->vce.harvest_config == AMDGPU_VCE_HARVEST_VCE0) in vce_v3_0_ring_set_wptr()
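
The three ring-pointer helpers above (get_rptr, get_wptr, set_wptr) share one harvest check: program instance 0 when nothing is harvested or when only VCE1 is fused off, and fall back to instance 1 when VCE0 is harvested. Below is a minimal standalone sketch of that selection; the AMDGPU_VCE_HARVEST_* bit values are assumed, and vce_instance_for_access() is an illustrative helper, not driver code.

#include <stdio.h>

/* Bit-flag values modeled on amdgpu's harvest flags (assumption). */
#define AMDGPU_VCE_HARVEST_VCE0 (1 << 0)
#define AMDGPU_VCE_HARVEST_VCE1 (1 << 1)

/* Which VCE instance should the ring helpers program? -1 means none is usable. */
static int vce_instance_for_access(unsigned int harvest_config)
{
	if (harvest_config == 0 ||
	    harvest_config == AMDGPU_VCE_HARVEST_VCE1)
		return 0;	/* VCE0 is present: use instance 0 */
	else if (harvest_config == AMDGPU_VCE_HARVEST_VCE0)
		return 1;	/* only VCE1 is left: use instance 1 */
	return -1;		/* both instances harvested */
}

int main(void)
{
	unsigned int cfgs[] = {
		0,
		AMDGPU_VCE_HARVEST_VCE0,
		AMDGPU_VCE_HARVEST_VCE1,
		AMDGPU_VCE_HARVEST_VCE0 | AMDGPU_VCE_HARVEST_VCE1,
	};

	for (unsigned int i = 0; i < sizeof(cfgs) / sizeof(cfgs[0]); i++)
		printf("harvest_config=%u -> instance %d\n",
		       cfgs[i], vce_instance_for_access(cfgs[i]));
	return 0;
}

In the driver, the chosen instance is then selected at the hardware level (through the GRBM instance index) before the ring registers are read or written.
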
272 if (adev->vce.harvest_config & (1 << idx)) in vce_v3_0_start()
279 if (idx != 1 || adev->vce.harvest_config == AMDGPU_VCE_HARVEST_VCE0) { in vce_v3_0_start()
280 ring = &adev->vce.ring[0]; in vce_v3_0_start()
287 ring = &adev->vce.ring[1]; in vce_v3_0_start()
294 ring = &adev->vce.ring[2]; in vce_v3_0_start()
337 if (adev->vce.harvest_config & (1 << idx)) in vce_v3_0_stop()
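
vce_v3_0_start() and vce_v3_0_stop() walk both instance indices and skip any index whose bit is set in harvest_config, which is the (1 << idx) test seen at file lines 272 and 337. A compile-only sketch of that skip pattern follows; the instance count, callback, and context pointer are stand-ins, and the per-instance register programming is deliberately elided.

#define VCE_NUM_INSTANCES 2	/* VCE 3.0 exposes at most two instances (assumption) */

/* Invoke a callback for every instance that is not fused off. */
static void for_each_active_vce(unsigned int harvest_config,
				void (*fn)(int idx, void *ctx), void *ctx)
{
	for (int idx = 0; idx < VCE_NUM_INSTANCES; idx++) {
		if (harvest_config & (1 << idx))
			continue;	/* instance idx is harvested: skip it */
		fn(idx, ctx);		/* start/stop programming would go here */
	}
}
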
403 adev->vce.harvest_config = vce_v3_0_get_harvest_config(adev); in vce_v3_0_early_init()
405 if ((adev->vce.harvest_config & in vce_v3_0_early_init()
410 adev->vce.num_rings = 3; in vce_v3_0_early_init()
425 …r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_VCE_TRAP, &adev->vce.i… in vce_v3_0_sw_init()
435 if (adev->vce.fw_version < FW_52_8_3) in vce_v3_0_sw_init()
436 adev->vce.num_rings = 2; in vce_v3_0_sw_init()
442 for (i = 0; i < adev->vce.num_rings; i++) { in vce_v3_0_sw_init()
445 ring = &adev->vce.ring[i]; in vce_v3_0_sw_init()
447 r = amdgpu_ring_init(adev, ring, 512, &adev->vce.irq, 0, in vce_v3_0_sw_init()
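
Together, the early_init and sw_init hits show the ring-count policy: early_init defaults to three rings, then sw_init registers the shared VCE interrupt source, trims the count to two when the loaded VCE firmware is older than FW_52_8_3, and initializes each remaining ring against that interrupt. A standalone sketch of the count decision follows; the FW_52_8_3 encoding below is only an assumption made so the example runs, not the driver's definition.

#include <stdio.h>

/* Stand-in for the FW_52_8_3 cutoff checked in vce_v3_0_sw_init();
 * the exact firmware-version packing is assumed here. */
#define FW_52_8_3	((52u << 24) | (8u << 16) | (3u << 8))

static unsigned int vce_num_rings(unsigned int fw_version)
{
	unsigned int num_rings = 3;	/* default chosen in vce_v3_0_early_init() */

	if (fw_version < FW_52_8_3)	/* older firmware: limit to two rings */
		num_rings = 2;

	return num_rings;
}

int main(void)
{
	printf("fw 52.4.x -> %u rings, fw 52.8.3 -> %u rings\n",
	       vce_num_rings((52u << 24) | (4u << 16)),
	       vce_num_rings(FW_52_8_3));
	return 0;
}
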
477 for (i = 0; i < adev->vce.num_rings; i++) { in vce_v3_0_hw_init()
478 r = amdgpu_ring_test_helper(&adev->vce.ring[i]); in vce_v3_0_hw_init()
493 cancel_delayed_work_sync(&adev->vce.idle_work); in vce_v3_0_hw_fini()
519 cancel_delayed_work_sync(&adev->vce.idle_work); in vce_v3_0_suspend()
566 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR0, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
567 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR1, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
568 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR2, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
570 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
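
The mc_resume hits write adev->vce.gpu_addr >> 8 into the VCPU cache 40-bit BAR registers, i.e. each register takes the upper bits of a 256-byte-aligned GPU address. A tiny standalone illustration of that encoding follows; vce_bar_value() and the example address are made up for the demo.

#include <stdio.h>
#include <stdint.h>

/* Encode a GPU address the way the 40-bit BAR writes above do:
 * the register stores the address divided by 256, so the firmware
 * buffer is expected to be at least 256-byte aligned. */
static uint32_t vce_bar_value(uint64_t gpu_addr)
{
	return (uint32_t)(gpu_addr >> 8);
}

int main(void)
{
	uint64_t gpu_addr = 0x0000001234567800ull;	/* example aligned address */

	printf("BAR value: 0x%08x\n", (unsigned int)vce_bar_value(gpu_addr));
	return 0;
}
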
605 mask |= (adev->vce.harvest_config & AMDGPU_VCE_HARVEST_VCE0) ? 0 : SRBM_STATUS2__VCE0_BUSY_MASK; in vce_v3_0_is_idle()
606 mask |= (adev->vce.harvest_config & AMDGPU_VCE_HARVEST_VCE1) ? 0 : SRBM_STATUS2__VCE1_BUSY_MASK; in vce_v3_0_is_idle()
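
vce_v3_0_is_idle() assembles its busy mask only from instances that survived harvesting: a fused-off instance contributes nothing, a present one contributes its VCEn_BUSY bit, and the block counts as idle when none of the collected bits are set in SRBM_STATUS2. A standalone sketch of the mask construction follows; both the harvest flag values and the SRBM bit positions below are assumed for the demo.

#include <stdio.h>

#define AMDGPU_VCE_HARVEST_VCE0		(1 << 0)	/* assumed flag values */
#define AMDGPU_VCE_HARVEST_VCE1		(1 << 1)
#define SRBM_STATUS2__VCE0_BUSY_MASK	(1 << 7)	/* placeholder bit positions */
#define SRBM_STATUS2__VCE1_BUSY_MASK	(1 << 8)

/* Bits of SRBM_STATUS2 that must be clear for VCE to count as idle. */
static unsigned int vce_busy_mask(unsigned int harvest_config)
{
	unsigned int mask = 0;

	mask |= (harvest_config & AMDGPU_VCE_HARVEST_VCE0) ? 0 : SRBM_STATUS2__VCE0_BUSY_MASK;
	mask |= (harvest_config & AMDGPU_VCE_HARVEST_VCE1) ? 0 : SRBM_STATUS2__VCE1_BUSY_MASK;

	return mask;
}

int main(void)
{
	printf("no harvest: 0x%x, VCE1 harvested: 0x%x\n",
	       vce_busy_mask(0), vce_busy_mask(AMDGPU_VCE_HARVEST_VCE1));
	return 0;
}
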
662 adev->vce.srbm_soft_reset = srbm_soft_reset; in vce_v3_0_check_soft_reset()
665 adev->vce.srbm_soft_reset = 0; in vce_v3_0_check_soft_reset()
675 if (!adev->vce.srbm_soft_reset) in vce_v3_0_soft_reset()
677 srbm_soft_reset = adev->vce.srbm_soft_reset; in vce_v3_0_soft_reset()
705 if (!adev->vce.srbm_soft_reset) in vce_v3_0_pre_soft_reset()
718 if (!adev->vce.srbm_soft_reset) in vce_v3_0_post_soft_reset()
752 amdgpu_fence_process(&adev->vce.ring[entry->src_data[0]]); in vce_v3_0_process_interrupt()
776 if (adev->vce.harvest_config & (1 << i)) in vce_v3_0_set_clockgating_state()
972 for (i = 0; i < adev->vce.num_rings; i++) { in vce_v3_0_set_ring_funcs()
973 adev->vce.ring[i].funcs = &vce_v3_0_ring_vm_funcs; in vce_v3_0_set_ring_funcs()
974 adev->vce.ring[i].me = i; in vce_v3_0_set_ring_funcs()
978 for (i = 0; i < adev->vce.num_rings; i++) { in vce_v3_0_set_ring_funcs()
979 adev->vce.ring[i].funcs = &vce_v3_0_ring_phys_funcs; in vce_v3_0_set_ring_funcs()
980 adev->vce.ring[i].me = i; in vce_v3_0_set_ring_funcs()
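
vce_v3_0_set_ring_funcs() installs one of two function tables (a VM variant or a physical-addressing variant) on every ring and records each ring's slot in .me, which the ring-pointer helpers use to address the matching per-ring registers. A compile-only sketch of that assignment pattern follows, with stand-in structures; none of these types or names are the driver's.

#include <stddef.h>

/* Stand-ins for amdgpu_ring / amdgpu_ring_funcs, just for illustration. */
struct ring_funcs { int dummy; };
struct ring {
	const struct ring_funcs *funcs;
	unsigned int me;	/* which ring slot / VCE instance this is */
};

static const struct ring_funcs vm_funcs = { .dummy = 0 };	/* VM-addressing table */
static const struct ring_funcs phys_funcs = { .dummy = 1 };	/* physical-addressing table */

static void set_ring_funcs(struct ring *rings, size_t num_rings, int use_vm)
{
	const struct ring_funcs *funcs = use_vm ? &vm_funcs : &phys_funcs;

	for (size_t i = 0; i < num_rings; i++) {
		rings[i].funcs = funcs;		/* same table for every ring */
		rings[i].me = (unsigned int)i;	/* remember the ring index */
	}
}
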
993 adev->vce.irq.num_types = 1; in vce_v3_0_set_irq_funcs()
994 adev->vce.irq.funcs = &vce_v3_0_irq_funcs; in vce_v3_0_set_irq_funcs()