/linux/drivers/gpu/drm/radeon/
r600_dma.c
    143  upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF);  in r600_dma_resume()
    145  ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC));  in r600_dma_resume()
    150  WREG32(DMA_RB_BASE, ring->gpu_addr >> 8);  in r600_dma_resume()
    236  u64 gpu_addr;  in r600_dma_ring_test()  local
    243  gpu_addr = rdev->wb.gpu_addr + index;  in r600_dma_ring_test()
    254  radeon_ring_write(ring, lower_32_bits(gpu_addr));  in r600_dma_ring_test()
    255  radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff);  in r600_dma_ring_test()
    290  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in r600_dma_fence_ring_emit()
    317  u64 addr = semaphore->gpu_addr;  in r600_dma_semaphore_ring_emit()
    343  u64 gpu_addr;  in r600_dma_ib_test()  local
    [all …]
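The r600_dma.c hits above keep splitting one 64-bit gpu_addr into 32-bit register writes. Below is a standalone sketch of that arithmetic (ordinary userspace C with made-up addresses, not driver code; the two helpers only mirror the kernel's lower_32_bits/upper_32_bits): the read-pointer writeback address is treated as a 40-bit value with a dword-aligned low word and an 8-bit high byte, and the ring base is programmed in 256-byte units.

/*
 * Standalone sketch (not driver code): the address splits seen in
 * r600_dma_resume().  The sample addresses are made up; only the masks
 * and shifts mirror the hits above.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
static uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

int main(void)
{
    uint64_t rptr_addr = 0x0000008123456780ULL; /* wb.gpu_addr + RPTR offset (example) */
    uint64_t ring_base = 0x0000000087654300ULL; /* ring->gpu_addr, 256-byte aligned (example) */

    /* read-pointer writeback address: 40-bit, low dword kept 4-byte aligned */
    uint32_t rptr_hi = upper_32_bits(rptr_addr) & 0xFF;
    uint32_t rptr_lo = lower_32_bits(rptr_addr) & 0xFFFFFFFC;

    /* ring base register: the ring address is programmed in 256-byte units */
    uint32_t rb_base = (uint32_t)(ring_base >> 8);

    /* the 40-bit address is recoverable from the two register values */
    assert((((uint64_t)rptr_hi << 32) | rptr_lo) == (rptr_addr & 0xFFFFFFFFFCULL));
    /* ...and the base register reproduces the aligned ring address */
    assert(((uint64_t)rb_base << 8) == ring_base);

    printf("rptr hi/lo = 0x%02x / 0x%08x, rb_base = 0x%08x\n", rptr_hi, rptr_lo, rb_base);
    return 0;
}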
uvd_v4_2.c
    47  addr = (rdev->uvd.gpu_addr + 0x200) >> 3;  in uvd_v4_2_resume()
    49  addr = rdev->uvd.gpu_addr >> 3;  in uvd_v4_2_resume()
    67  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v4_2_resume()
    71  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v4_2_resume()
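uvd_v4_2.c (like the other UVD files below) derives three values from the same rdev->uvd.gpu_addr: an offset in 8-byte units and two slices of the upper address bits. A minimal sketch of that arithmetic with a made-up address (plain userspace C, not the driver's register programming):

/* Standalone sketch (not driver code): the three values the
 * uvd_v4_2_resume() hits compute from rdev->uvd.gpu_addr.  The sample
 * address is made up; only the shifts and masks mirror the hits. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint64_t gpu_addr = 0x000000A3D2C45678ULL; /* example 40-bit address */

    uint64_t qword_offset = gpu_addr >> 3;           /* address in 8-byte units */
    uint32_t bits_28_31   = (gpu_addr >> 28) & 0xF;  /* nibble above the low 28 bits */
    uint32_t bits_32_39   = (gpu_addr >> 32) & 0xFF; /* byte holding bits 32..39 */

    assert(qword_offset * 8 == (gpu_addr & ~7ULL));
    assert(bits_28_31 == ((gpu_addr & 0xF0000000ULL) >> 28));
    assert(bits_32_39 == ((gpu_addr & 0xFF00000000ULL) >> 32));

    printf("qwords=0x%llx bits28-31=0x%x bits32-39=0x%02x\n",
           (unsigned long long)qword_offset, bits_28_31, bits_32_39);
    return 0;
}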
cik_sdma.c
    154  radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */  in cik_sdma_ring_ib_execute()
    155  radeon_ring_write(ring, upper_32_bits(ib->gpu_addr));  in cik_sdma_ring_ib_execute()
    203  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in cik_sdma_fence_ring_emit()
    232  u64 addr = semaphore->gpu_addr;  in cik_sdma_semaphore_ring_emit()
    400  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in cik_sdma_gfx_resume()
    402  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));  in cik_sdma_gfx_resume()
    407  WREG32(SDMA0_GFX_RB_BASE + reg_offset, ring->gpu_addr >> 8);  in cik_sdma_gfx_resume()
    408  WREG32(SDMA0_GFX_RB_BASE_HI + reg_offset, ring->gpu_addr >> 40);  in cik_sdma_gfx_resume()
    651  u64 gpu_addr;  in cik_sdma_ring_test()  local
    658  gpu_addr = rdev->wb.gpu_addr + index;  in cik_sdma_ring_test()
    [all …]
uvd_v2_2.c
    43  uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;  in uvd_v2_2_fence_emit()
    77  uint64_t addr = semaphore->gpu_addr;  in uvd_v2_2_semaphore_emit()
    113  addr = rdev->uvd.gpu_addr >> 3;  in uvd_v2_2_resume()
    130  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v2_2_resume()
    134  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v2_2_resume()
uvd_v1_0.c
    85  uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;  in uvd_v1_0_fence_emit()
    121  addr = (rdev->uvd.gpu_addr >> 3) + 16;  in uvd_v1_0_resume()
    138  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v1_0_resume()
    142  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v1_0_resume()
    364  WREG32(UVD_LMI_EXT40_ADDR, upper_32_bits(ring->gpu_addr) |  in uvd_v1_0_start()
    374  WREG32(UVD_RBC_RB_BASE, ring->gpu_addr);  in uvd_v1_0_start()
    487  radeon_ring_write(ring, ib->gpu_addr);  in uvd_v1_0_ib_execute()
radeon_semaphore.c
    51  (*semaphore)->gpu_addr = radeon_sa_bo_gpu_addr((*semaphore)->sa_bo);  in radeon_semaphore_create()
    69  ring->last_semaphore_signal_addr = semaphore->gpu_addr;  in radeon_semaphore_emit_signal()
    86  ring->last_semaphore_wait_addr = semaphore->gpu_addr;  in radeon_semaphore_emit_wait()
vce_v1_0.c
    218  uint64_t addr = rdev->vce.gpu_addr;  in vce_v1_0_resume()
    300  WREG32(VCE_RB_BASE_LO, ring->gpu_addr);  in vce_v1_0_start()
    301  WREG32(VCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr));  in vce_v1_0_start()
    307  WREG32(VCE_RB_BASE_LO2, ring->gpu_addr);  in vce_v1_0_start()
    308  WREG32(VCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));  in vce_v1_0_start()
radeon_trace.h
    177  __field(uint64_t, gpu_addr)
    183  __entry->gpu_addr = sem->gpu_addr;
    187  __entry->waiters, __entry->gpu_addr)
evergreen_dma.c
    44  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in evergreen_dma_fence_ring_emit()
    88  radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));  in evergreen_dma_ring_ib_execute()
    89  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in evergreen_dma_ring_ib_execute()
ni_dma.c
    144  radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));  in cayman_dma_ring_ib_execute()
    145  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in cayman_dma_ring_ib_execute()
    222  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);  in cayman_dma_resume()
    224  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));  in cayman_dma_resume()
    229  WREG32(DMA_RB_BASE + reg_offset, ring->gpu_addr >> 8);  in cayman_dma_resume()
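evergreen_dma.c and ni_dma.c emit the same pair of dwords when scheduling an indirect buffer: the low 32 bits of ib->gpu_addr masked to 32-byte alignment, and a word that packs length_dw above the top byte of the 40-bit address. A standalone sketch of that packing with made-up values (the field layout is inferred only from the expressions shown in these hits, not from hardware documentation):

/* Standalone sketch (not driver code): the two dwords built by the
 * *_dma_ring_ib_execute() hits above.  Sample values are made up. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

int main(void)
{
    uint64_t ib_gpu_addr = 0x000000C1A2B3C4E0ULL; /* 32-byte aligned IB address (example) */
    uint32_t length_dw   = 256;                   /* IB size in dwords (example) */

    uint32_t dw_lo = (uint32_t)ib_gpu_addr & 0xFFFFFFE0;                      /* bits 5..31 of the address */
    uint32_t dw_hi = (length_dw << 12) | (upper_32_bits(ib_gpu_addr) & 0xFF); /* length | bits 32..39 */

    /* the 40-bit, 32-byte-aligned address is recoverable from the two dwords */
    uint64_t rebuilt = ((uint64_t)(dw_hi & 0xFF) << 32) | dw_lo;
    assert(rebuilt == (ib_gpu_addr & 0xFFFFFFFFE0ULL));
    assert((dw_hi >> 12) == length_dw);

    printf("dw_lo=0x%08x dw_hi=0x%08x\n", dw_lo, dw_hi);
    return 0;
}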
radeon_fence.c
    819  rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr +  in radeon_fence_driver_start_ring()
    826  rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index;  in radeon_fence_driver_start_ring()
    839  rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index;  in radeon_fence_driver_start_ring()
    844  ring, rdev->fence_drv[ring].gpu_addr);  in radeon_fence_driver_start_ring()
    864  rdev->fence_drv[ring].gpu_addr = 0;  in radeon_fence_driver_init_ring()
/linux/drivers/gpu/drm/amd/amdgpu/
vce_v4_0.c
    157  uint64_t addr = table->gpu_addr;  in vce_v4_0_mmsch_start()
    235  lower_32_bits(ring->gpu_addr));  in vce_v4_0_sriov_start()
    237  upper_32_bits(ring->gpu_addr));  in vce_v4_0_sriov_start()
    263  adev->vce.gpu_addr >> 8);  in vce_v4_0_sriov_start()
    266  (adev->vce.gpu_addr >> 40) & 0xff);  in vce_v4_0_sriov_start()
    273  adev->vce.gpu_addr >> 8);  in vce_v4_0_sriov_start()
    276  (adev->vce.gpu_addr >> 40) & 0xff);  in vce_v4_0_sriov_start()
    279  adev->vce.gpu_addr >> 8);  in vce_v4_0_sriov_start()
    282  (adev->vce.gpu_addr >> 40) & 0xff);  in vce_v4_0_sriov_start()
    345  WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_BASE_LO), ring->gpu_addr);  in vce_v4_0_start()
    [all …]
amdgpu_ih.c
    69  ih->gpu_addr = dma_addr;  in amdgpu_ih_ring_init()
    89  &ih->ring_obj, &ih->gpu_addr,  in amdgpu_ih_ring_init()
    97  ih->wptr_addr = adev->wb.gpu_addr + wptr_offs * 4;  in amdgpu_ih_ring_init()
    99  ih->rptr_addr = adev->wb.gpu_addr + rptr_offs * 4;  in amdgpu_ih_ring_init()
    128  (void *)ih->ring, ih->gpu_addr);  in amdgpu_ih_ring_fini()
    131  amdgpu_bo_free_kernel(&ih->ring_obj, &ih->gpu_addr,  in amdgpu_ih_ring_fini()
    133  amdgpu_device_wb_free(adev, (ih->wptr_addr - ih->gpu_addr) / 4);  in amdgpu_ih_ring_fini()
    134  amdgpu_device_wb_free(adev, (ih->rptr_addr - ih->gpu_addr) / 4);  in amdgpu_ih_ring_fini()
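The amdgpu_ih.c hits compute the write- and read-pointer addresses as 4-byte slots inside the device writeback buffer, and at teardown turn a stored address back into a slot index for the allocator. A minimal standalone sketch of that slot arithmetic with made-up values (only the "base + offset * 4" and "(addr - base) / 4" relationship mirrors the hits; it is plain userspace C, not the driver):

/* Standalone sketch (not driver code): writeback-slot arithmetic as in
 * the amdgpu_ih.c hits above.  Base address and slot numbers are made up. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint64_t wb_gpu_addr = 0x0000000812340000ULL; /* writeback buffer base (example) */
    uint32_t wptr_offs = 24, rptr_offs = 25;      /* 4-byte slot indices (example) */

    /* ring init: each index selects one 32-bit slot in the writeback buffer */
    uint64_t wptr_addr = wb_gpu_addr + wptr_offs * 4;
    uint64_t rptr_addr = wb_gpu_addr + rptr_offs * 4;

    /* ring fini: a stored address is converted back to a slot index
     * before the slot is handed back to the allocator */
    assert((wptr_addr - wb_gpu_addr) / 4 == wptr_offs);
    assert((rptr_addr - wb_gpu_addr) / 4 == rptr_offs);

    printf("wptr slot %u at 0x%llx, rptr slot %u at 0x%llx\n",
           wptr_offs, (unsigned long long)wptr_addr,
           rptr_offs, (unsigned long long)rptr_addr);
    return 0;
}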
vcn_v2_0.c
    391  lower_32_bits(adev->vcn.inst->gpu_addr));  in vcn_v2_0_mc_resume()
    393  upper_32_bits(adev->vcn.inst->gpu_addr));  in vcn_v2_0_mc_resume()
    403  lower_32_bits(adev->vcn.inst->gpu_addr + offset));  in vcn_v2_0_mc_resume()
    405  upper_32_bits(adev->vcn.inst->gpu_addr + offset));  in vcn_v2_0_mc_resume()
    411  lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v2_0_mc_resume()
    413  upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v2_0_mc_resume()
    419  lower_32_bits(adev->vcn.inst->fw_shared.gpu_addr));  in vcn_v2_0_mc_resume()
    421  upper_32_bits(adev->vcn.inst->fw_shared.gpu_addr));  in vcn_v2_0_mc_resume()
    457  lower_32_bits(adev->vcn.inst->gpu_addr), 0, indirect);  in vcn_v2_0_mc_resume_dpg_mode()
    460  upper_32_bits(adev->vcn.inst->gpu_addr), 0, indirect);  in vcn_v2_0_mc_resume_dpg_mode()
    [all …]
vcn_v2_5.c
    479  lower_32_bits(adev->vcn.inst[i].gpu_addr));  in vcn_v2_5_mc_resume()
    481  upper_32_bits(adev->vcn.inst[i].gpu_addr));  in vcn_v2_5_mc_resume()
    490  lower_32_bits(adev->vcn.inst[i].gpu_addr + offset));  in vcn_v2_5_mc_resume()
    492  upper_32_bits(adev->vcn.inst[i].gpu_addr + offset));  in vcn_v2_5_mc_resume()
    498  lower_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v2_5_mc_resume()
    500  upper_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v2_5_mc_resume()
    506  lower_32_bits(adev->vcn.inst[i].fw_shared.gpu_addr));  in vcn_v2_5_mc_resume()
    508  upper_32_bits(adev->vcn.inst[i].fw_shared.gpu_addr));  in vcn_v2_5_mc_resume()
    543  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v2_5_mc_resume_dpg_mode()
    546  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v2_5_mc_resume_dpg_mode()
    [all …]
vcn_v1_0.c
    360  lower_32_bits(adev->vcn.inst->gpu_addr));  in vcn_v1_0_mc_resume_spg_mode()
    362  upper_32_bits(adev->vcn.inst->gpu_addr));  in vcn_v1_0_mc_resume_spg_mode()
    372  lower_32_bits(adev->vcn.inst->gpu_addr + offset));  in vcn_v1_0_mc_resume_spg_mode()
    374  upper_32_bits(adev->vcn.inst->gpu_addr + offset));  in vcn_v1_0_mc_resume_spg_mode()
    380  lower_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v1_0_mc_resume_spg_mode()
    382  upper_32_bits(adev->vcn.inst->gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v1_0_mc_resume_spg_mode()
    430  lower_32_bits(adev->vcn.inst->gpu_addr), 0xFFFFFFFF, 0);  in vcn_v1_0_mc_resume_dpg_mode()
    432  upper_32_bits(adev->vcn.inst->gpu_addr), 0xFFFFFFFF, 0);  in vcn_v1_0_mc_resume_dpg_mode()
    442  lower_32_bits(adev->vcn.inst->gpu_addr + offset), 0xFFFFFFFF, 0);  in vcn_v1_0_mc_resume_dpg_mode()
    444  upper_32_bits(adev->vcn.inst->gpu_addr + offset), 0xFFFFFFFF, 0);  in vcn_v1_0_mc_resume_dpg_mode()
    [all …]
vcn_v3_0.c
    506  lower_32_bits(adev->vcn.inst[inst].gpu_addr));  in vcn_v3_0_mc_resume()
    508  upper_32_bits(adev->vcn.inst[inst].gpu_addr));  in vcn_v3_0_mc_resume()
    517  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset));  in vcn_v3_0_mc_resume()
    519  upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset));  in vcn_v3_0_mc_resume()
    525  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v3_0_mc_resume()
    527  upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v3_0_mc_resume()
    533  lower_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));  in vcn_v3_0_mc_resume()
    535  upper_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));  in vcn_v3_0_mc_resume()
    569  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v3_0_mc_resume_dpg_mode()
    572  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v3_0_mc_resume_dpg_mode()
    [all …]
amdgpu_sa.c
    54  &sa_manager->bo, &sa_manager->gpu_addr,  in amdgpu_sa_bo_manager_init()
    76  amdgpu_bo_free_kernel(&sa_manager->bo, &sa_manager->gpu_addr, &sa_manager->cpu_ptr);  in amdgpu_sa_bo_manager_fini()
    114  drm_suballoc_dump_debug_info(&sa_manager->base, &p, sa_manager->gpu_addr);  in amdgpu_sa_bo_dump_debug_info()
vce_v3_0.c
    283  WREG32(mmVCE_RB_BASE_LO, ring->gpu_addr);  in vce_v3_0_start()
    284  WREG32(mmVCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr));  in vce_v3_0_start()
    290  WREG32(mmVCE_RB_BASE_LO2, ring->gpu_addr);  in vce_v3_0_start()
    291  WREG32(mmVCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));  in vce_v3_0_start()
    297  WREG32(mmVCE_RB_BASE_LO3, ring->gpu_addr);  in vce_v3_0_start()
    298  WREG32(mmVCE_RB_BASE_HI3, upper_32_bits(ring->gpu_addr));  in vce_v3_0_start()
    566  WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR0, (adev->vce.gpu_addr >> 8));  in vce_v3_0_mc_resume()
    567  WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR1, (adev->vce.gpu_addr >> 8));  in vce_v3_0_mc_resume()
    568  WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR2, (adev->vce.gpu_addr >> 8));  in vce_v3_0_mc_resume()
    570  WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR, (adev->vce.gpu_addr >> 8));  in vce_v3_0_mc_resume()
    [all …]
vcn_v4_0_5.c
    386  lower_32_bits(adev->vcn.inst[inst].gpu_addr));  in vcn_v4_0_5_mc_resume()
    388  upper_32_bits(adev->vcn.inst[inst].gpu_addr));  in vcn_v4_0_5_mc_resume()
    396  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset));  in vcn_v4_0_5_mc_resume()
    398  upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset));  in vcn_v4_0_5_mc_resume()
    404  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v4_0_5_mc_resume()
    406  upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v4_0_5_mc_resume()
    412  lower_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));  in vcn_v4_0_5_mc_resume()
    414  upper_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));  in vcn_v4_0_5_mc_resume()
    462  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v4_0_5_mc_resume_dpg_mode()
    465  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v4_0_5_mc_resume_dpg_mode()
    [all …]
amdgpu_seq64.h
    41  int amdgpu_seq64_alloc(struct amdgpu_device *adev, u64 *gpu_addr, u64 **cpu_addr);
    42  void amdgpu_seq64_free(struct amdgpu_device *adev, u64 gpu_addr);
sdma_v4_4_2.c
    379  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);  in sdma_v4_4_2_ring_emit_ib()
    380  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));  in sdma_v4_4_2_ring_emit_ib()
    691  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v4_4_2_gfx_resume()
    693  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v4_4_2_gfx_resume()
    698  WREG32_SDMA(i, regSDMA_GFX_RB_BASE, ring->gpu_addr >> 8);  in sdma_v4_4_2_gfx_resume()
    699  WREG32_SDMA(i, regSDMA_GFX_RB_BASE_HI, ring->gpu_addr >> 40);  in sdma_v4_4_2_gfx_resume()
    729  wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4);  in sdma_v4_4_2_gfx_resume()
    785  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v4_4_2_page_resume()
    787  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v4_4_2_page_resume()
    792  WREG32_SDMA(i, regSDMA_PAGE_RB_BASE, ring->gpu_addr >> 8);  in sdma_v4_4_2_page_resume()
    [all …]
sdma_v7_0.c
    314  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);  in sdma_v7_0_ring_emit_ib()
    315  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));  in sdma_v7_0_ring_emit_ib()
    555  …WREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_BASE), ring->gpu_addr >> …  in sdma_v7_0_gfx_resume()
    556  …WREG32_SOC15_IP(GC, sdma_v7_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_BASE_HI), ring->gpu_addr …  in sdma_v7_0_gfx_resume()
    915  u64 gpu_addr;  in sdma_v7_0_ring_test_ring()  local
    924  gpu_addr = amdgpu_mes_ctx_get_offs_gpu_addr(ring, offset);  in sdma_v7_0_ring_test_ring()
    934  gpu_addr = adev->wb.gpu_addr + (index * 4);  in sdma_v7_0_ring_test_ring()
    948  amdgpu_ring_write(ring, lower_32_bits(gpu_addr));  in sdma_v7_0_ring_test_ring()
    949  amdgpu_ring_write(ring, upper_32_bits(gpu_addr));  in sdma_v7_0_ring_test_ring()
    993  u64 gpu_addr;  in sdma_v7_0_ring_test_ib()  local
    [all …]
sdma_v3_0.c
    434  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);  in sdma_v3_0_ring_emit_ib()
    435  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));  in sdma_v3_0_ring_emit_ib()
    690  WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8);  in sdma_v3_0_gfx_resume()
    691  WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40);  in sdma_v3_0_gfx_resume()
    810  u64 gpu_addr;  in sdma_v3_0_ring_test_ring()  local
    816  gpu_addr = adev->wb.gpu_addr + (index * 4);  in sdma_v3_0_ring_test_ring()
    826  amdgpu_ring_write(ring, lower_32_bits(gpu_addr));  in sdma_v3_0_ring_test_ring()
    827  amdgpu_ring_write(ring, upper_32_bits(gpu_addr));  in sdma_v3_0_ring_test_ring()
    863  u64 gpu_addr;  in sdma_v3_0_ring_test_ib()  local
    870  gpu_addr = adev->wb.gpu_addr + (index * 4);  in sdma_v3_0_ring_test_ib()
    [all …]
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_mqd_manager.c
    58  mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr;  in allocate_hiq_mqd()
    84  mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr + offset;  in allocate_sdma_mqd()
    285  mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr + offset;  in kfd_get_hiq_xcc_mqd()