/linux/drivers/gpu/drm/radeon/
r600_dma.c
  143  upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF);  in r600_dma_resume()
  145  ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC));  in r600_dma_resume()
  150  WREG32(DMA_RB_BASE, ring->gpu_addr >> 8);  in r600_dma_resume()
  236  u64 gpu_addr;  in r600_dma_ring_test()  local
  243  gpu_addr = rdev->wb.gpu_addr + index;  in r600_dma_ring_test()
  254  radeon_ring_write(ring, lower_32_bits(gpu_addr));  in r600_dma_ring_test()
  255  radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff);  in r600_dma_ring_test()
  290  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in r600_dma_fence_ring_emit()
  317  u64 addr = semaphore->gpu_addr;  in r600_dma_semaphore_ring_emit()
  343  u64 gpu_addr;  in r600_dma_ib_test()  local
  [all …]
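The r600_dma.c matches revolve around one constraint: the r600 DMA engine addresses at most 40 bits, so a 64-bit gpu_addr is emitted as a full low dword plus a single upper byte (upper_32_bits(...) & 0xff), and the ring base register takes the address shifted right by 8, i.e. 256-byte aligned. A minimal, self-contained sketch of that split, assuming a hypothetical address (the two helpers are re-implemented here, not taken from kernel headers):

    #include <stdint.h>
    #include <stdio.h>

    /* Re-implementations of the kernel helpers used in the matches above. */
    static uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
    static uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

    int main(void)
    {
        uint64_t gpu_addr = 0xAB12345678ULL;  /* hypothetical 40-bit writeback slot */

        uint32_t dw_lo = lower_32_bits(gpu_addr);         /* full low dword   */
        uint32_t dw_hi = upper_32_bits(gpu_addr) & 0xff;  /* bits 39:32 only  */

        printf("ring dwords: 0x%08x 0x%02x\n", dw_lo, dw_hi);
        return 0;
    }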
|
uvd_v4_2.c
   47  addr = (rdev->uvd.gpu_addr + 0x200) >> 3;  in uvd_v4_2_resume()
   49  addr = rdev->uvd.gpu_addr >> 3;  in uvd_v4_2_resume()
   67  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v4_2_resume()
   71  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v4_2_resume()
|
cik_sdma.c
  154  radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */  in cik_sdma_ring_ib_execute()
  155  radeon_ring_write(ring, upper_32_bits(ib->gpu_addr));  in cik_sdma_ring_ib_execute()
  203  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in cik_sdma_fence_ring_emit()
  232  u64 addr = semaphore->gpu_addr;  in cik_sdma_semaphore_ring_emit()
  400  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in cik_sdma_gfx_resume()
  402  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));  in cik_sdma_gfx_resume()
  407  WREG32(SDMA0_GFX_RB_BASE + reg_offset, ring->gpu_addr >> 8);  in cik_sdma_gfx_resume()
  408  WREG32(SDMA0_GFX_RB_BASE_HI + reg_offset, ring->gpu_addr >> 40);  in cik_sdma_gfx_resume()
  651  u64 gpu_addr;  in cik_sdma_ring_test()  local
  658  gpu_addr = rdev->wb.gpu_addr + index;  in cik_sdma_ring_test()
  [all …]
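On CIK the SDMA blocks take wider addresses than r600: the IB base keeps a full upper dword (line 155) with the low dword masked to 32-byte alignment (per the listing's own comment), while the ring base is split across two registers, bits 39:8 into SDMA0_GFX_RB_BASE and everything above into SDMA0_GFX_RB_BASE_HI. A sketch of that register math with a stubbed MMIO write; the register offsets and address are illustrative only, not the real CIK values:

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in for the driver's WREG32() MMIO write. */
    static void WREG32(uint32_t reg, uint32_t val)
    {
        printf("reg 0x%04x <- 0x%08x\n", reg, val);
    }

    #define SDMA0_GFX_RB_BASE    0xd000  /* illustrative offsets only */
    #define SDMA0_GFX_RB_BASE_HI 0xd004

    int main(void)
    {
        /* 48-bit example so the HI register ends up nonzero; 256-byte aligned. */
        uint64_t ring_gpu_addr = 0x123456789A00ULL;

        WREG32(SDMA0_GFX_RB_BASE,    (uint32_t)(ring_gpu_addr >> 8));   /* bits 39:8  */
        WREG32(SDMA0_GFX_RB_BASE_HI, (uint32_t)(ring_gpu_addr >> 40));  /* bits 63:40 */
        return 0;
    }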
|
uvd_v2_2.c
   43  uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;  in uvd_v2_2_fence_emit()
   77  uint64_t addr = semaphore->gpu_addr;  in uvd_v2_2_semaphore_emit()
  113  addr = rdev->uvd.gpu_addr >> 3;  in uvd_v2_2_resume()
  130  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v2_2_resume()
  134  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v2_2_resume()
|
uvd_v1_0.c
   85  uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;  in uvd_v1_0_fence_emit()
  121  addr = (rdev->uvd.gpu_addr >> 3) + 16;  in uvd_v1_0_resume()
  138  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v1_0_resume()
  142  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v1_0_resume()
  364  WREG32(UVD_LMI_EXT40_ADDR, upper_32_bits(ring->gpu_addr) |  in uvd_v1_0_start()
  374  WREG32(UVD_RBC_RB_BASE, ring->gpu_addr);  in uvd_v1_0_start()
  487  radeon_ring_write(ring, ib->gpu_addr);  in uvd_v1_0_ib_execute()
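Across uvd_v1_0.c, uvd_v2_2.c, and uvd_v4_2.c the resume paths slice the UVD BO address the same way: the base is programmed in 8-byte units (>> 3), while separate fields carry bits 31:28 and bits 39:32 of the 64-bit address (the ring's upper bits go to an EXT40 register in uvd_v1_0_start()). A standalone sketch of that slicing; the address is hypothetical and only the bit math matches the listing:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t uvd_gpu_addr = 0xAB12340000ULL;  /* hypothetical UVD BO address */

        uint32_t offs_8b = (uint32_t)(uvd_gpu_addr >> 3);         /* 8-byte units */
        uint32_t ext28   = (uint32_t)(uvd_gpu_addr >> 28) & 0xF;  /* bits 31:28   */
        uint32_t ext32   = (uint32_t)(uvd_gpu_addr >> 32) & 0xFF; /* bits 39:32   */

        printf("offset=0x%08x ext28=0x%x ext32=0x%02x\n", offs_8b, ext28, ext32);
        return 0;
    }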
|
radeon_semaphore.c
   51  (*semaphore)->gpu_addr = radeon_sa_bo_gpu_addr((*semaphore)->sa_bo);  in radeon_semaphore_create()
   69  ring->last_semaphore_signal_addr = semaphore->gpu_addr;  in radeon_semaphore_emit_signal()
   86  ring->last_semaphore_wait_addr = semaphore->gpu_addr;  in radeon_semaphore_emit_wait()
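Radeon semaphores live in a sub-allocated buffer, so semaphore->gpu_addr comes from radeon_sa_bo_gpu_addr(), which (per the radeon_object.h match further down) is just the sub-allocator manager's base address plus the block's offset. A simplified model with stand-in types; the field name `soffset` mirrors the radeon original but the structures here are not the kernel's:

    #include <stdint.h>
    #include <stdio.h>

    struct sa_manager { uint64_t gpu_addr; };
    struct sa_bo      { struct sa_manager *manager; uint32_t soffset; };

    /* Model of radeon_sa_bo_gpu_addr(): manager base + sub-block offset. */
    static uint64_t sa_bo_gpu_addr(const struct sa_bo *sa)
    {
        return sa->manager->gpu_addr + sa->soffset;
    }

    int main(void)
    {
        struct sa_manager mgr = { .gpu_addr = 0x100000000ULL }; /* hypothetical */
        struct sa_bo sem = { .manager = &mgr, .soffset = 0x40 };

        printf("semaphore gpu_addr = 0x%llx\n",
               (unsigned long long)sa_bo_gpu_addr(&sem));
        return 0;
    }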
|
vce_v1_0.c
  218  uint64_t addr = rdev->vce.gpu_addr;  in vce_v1_0_resume()
  300  WREG32(VCE_RB_BASE_LO, ring->gpu_addr);  in vce_v1_0_start()
  301  WREG32(VCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr));  in vce_v1_0_start()
  307  WREG32(VCE_RB_BASE_LO2, ring->gpu_addr);  in vce_v1_0_start()
  308  WREG32(VCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr));  in vce_v1_0_start()
|
radeon_trace.h
  177  __field(uint64_t, gpu_addr)
  183  __entry->gpu_addr = sem->gpu_addr;
  187  __entry->waiters, __entry->gpu_addr)
|
evergreen_dma.c
   44  u64 addr = rdev->fence_drv[fence->ring].gpu_addr;  in evergreen_dma_fence_ring_emit()
   88  radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));  in evergreen_dma_ring_ib_execute()
   89  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in evergreen_dma_ring_ib_execute()
|
ni_dma.c
  144  radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));  in cayman_dma_ring_ib_execute()
  145  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in cayman_dma_ring_ib_execute()
  222  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);  in cayman_dma_resume()
  224  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));  in cayman_dma_resume()
  229  WREG32(DMA_RB_BASE + reg_offset, ring->gpu_addr >> 8);  in cayman_dma_resume()
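evergreen_dma.c and ni_dma.c emit identical indirect-buffer payloads: dword 0 carries the IB address masked to 32-byte alignment, and dword 1 packs the IB length in dwords (shifted left by 12) together with address bits 39:32 in the low byte. A runnable sketch of that packing with hypothetical values:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t ib_gpu_addr = 0xAB00042000ULL;  /* hypothetical, 32-byte aligned */
        uint32_t length_dw   = 64;               /* IB size in dwords             */

        uint32_t dw0 = (uint32_t)ib_gpu_addr & 0xFFFFFFE0;                 /* low addr  */
        uint32_t dw1 = (length_dw << 12) |
                       ((uint32_t)(ib_gpu_addr >> 32) & 0xFF);             /* len | hi8 */

        printf("IB packet: 0x%08x 0x%08x\n", dw0, dw1);
        return 0;
    }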
|
radeon_fence.c
  770  rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr +  in radeon_fence_driver_start_ring()
  777  rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index;  in radeon_fence_driver_start_ring()
  790  rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index;  in radeon_fence_driver_start_ring()
  795  ring, rdev->fence_drv[ring].gpu_addr);  in radeon_fence_driver_start_ring()
  815  rdev->fence_drv[ring].gpu_addr = 0;  in radeon_fence_driver_init_ring()
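radeon_fence_driver_start_ring() picks where a ring's fence value lives: normally a slot in the shared writeback page (rdev->wb.gpu_addr + index), but the UVD ring reuses a location inside the UVD BO instead. A pared-down model of that selection; names and values are simplified stand-ins, not the driver's structures:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Model: UVD fences live in the UVD BO, all others in the writeback page. */
    static uint64_t fence_gpu_addr(bool is_uvd, uint64_t wb_base,
                                   uint64_t uvd_base, uint32_t index)
    {
        return (is_uvd ? uvd_base : wb_base) + index;
    }

    int main(void)
    {
        printf("gfx fence at 0x%llx, uvd fence at 0x%llx\n",
               (unsigned long long)fence_gpu_addr(false, 0x2000, 0x90000, 16),
               (unsigned long long)fence_gpu_addr(true,  0x2000, 0x90000, 16));
        return 0;
    }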
|
radeon_object.h
  146  extern int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr);
  148  u64 max_offset, u64 *gpu_addr);
  180  return to_radeon_sa_manager(sa_bo->manager)->gpu_addr +  in radeon_sa_bo_gpu_addr()
|
radeon_object.c
  273  u64 *gpu_addr)  in radeon_bo_pin_restricted()  argument
  283  if (gpu_addr)  in radeon_bo_pin_restricted()
  284  *gpu_addr = radeon_bo_gpu_offset(bo);  in radeon_bo_pin_restricted()
  319  if (gpu_addr != NULL)  in radeon_bo_pin_restricted()
  320  *gpu_addr = radeon_bo_gpu_offset(bo);  in radeon_bo_pin_restricted()
  331  int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr)  in radeon_bo_pin()  argument
  333  return radeon_bo_pin_restricted(bo, domain, 0, gpu_addr);  in radeon_bo_pin()
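radeon_bo_pin() is a thin wrapper: it forwards to radeon_bo_pin_restricted() with max_offset = 0 (no placement cap), and the pinned GPU offset is reported back through *gpu_addr only when the caller passes a non-NULL pointer. A plain-C model of that shape, with the placement logic elided and a stand-in BO type:

    #include <stdint.h>

    struct bo { uint64_t offset; };  /* stand-in for struct radeon_bo */

    static int bo_pin_restricted(struct bo *bo, uint32_t domain,
                                 uint64_t max_offset, uint64_t *gpu_addr)
    {
        (void)domain; (void)max_offset;   /* placement handling elided */
        if (gpu_addr)
            *gpu_addr = bo->offset;       /* models radeon_bo_gpu_offset(bo) */
        return 0;
    }

    static int bo_pin(struct bo *bo, uint32_t domain, uint64_t *gpu_addr)
    {
        return bo_pin_restricted(bo, domain, 0, gpu_addr);
    }

    int main(void)
    {
        struct bo b = { .offset = 0xF4000000ULL };  /* hypothetical VRAM offset */
        uint64_t gpu_addr = 0;

        return bo_pin(&b, 0 /* domain unused in this model */, &gpu_addr);
    }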
|
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_isp.c
  180  u64 gpu_addr;  in isp_user_buffer_alloc()  local
  204  AMDGPU_GEM_DOMAIN_GTT, &bo, &gpu_addr);  in isp_user_buffer_alloc()
  211  *buf_addr = gpu_addr;  in isp_user_buffer_alloc()
  251  void **buf_obj, u64 *gpu_addr, void **cpu_addr)  in isp_kernel_buffer_alloc()  argument
  266  if (WARN_ON(!gpu_addr))  in isp_kernel_buffer_alloc()
  290  gpu_addr,  in isp_kernel_buffer_alloc()
  313  void isp_kernel_buffer_free(void **buf_obj, u64 *gpu_addr, void **cpu_addr)  in isp_kernel_buffer_free()  argument
  317  amdgpu_bo_free_kernel(bo, gpu_addr, cpu_addr);  in isp_kernel_buffer_free()
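Both ISP helpers hand the caller the full triple of BO handle, GPU address, and CPU mapping by deferring to amdgpu's pinned-kernel-buffer API. A hedged kernel-style fragment of that underlying amdgpu_bo_create_kernel()/amdgpu_bo_free_kernel() pairing (not the ISP wrapper itself, whose leading parameters are elided in the listing; `adev`, the size, and the GTT domain are assumptions, and the fragment is not compilable stand-alone):

    /* Kernel-style fragment; assumes a valid `adev` in scope. */
    struct amdgpu_bo *bo;
    u64 gpu_addr;
    void *cpu_addr;
    int r;

    r = amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE,
                                AMDGPU_GEM_DOMAIN_GTT,
                                &bo, &gpu_addr, &cpu_addr);
    if (r)
        return r;

    /* ... gpu_addr is handed to the device, cpu_addr to the driver ... */

    amdgpu_bo_free_kernel(&bo, &gpu_addr, &cpu_addr);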
|
mes_userqueue.c
   96  queue->wptr_obj.gpu_addr = amdgpu_bo_gpu_offset_no_check(wptr_obj->obj);  in mes_userq_create_wptr_mapping()
  134  queue_input.process_context_addr = ctx->gpu_addr;  in mes_userq_map()
  135  queue_input.gang_context_addr = ctx->gpu_addr + AMDGPU_USERQ_PROC_CTX_SZ;  in mes_userq_map()
  141  queue_input.mqd_addr = queue->mqd.gpu_addr;  in mes_userq_map()
  146  queue_input.wptr_mc_addr = queue->wptr_obj.gpu_addr;  in mes_userq_map()
  170  queue_input.gang_context_addr = ctx->gpu_addr + AMDGPU_USERQ_PROC_CTX_SZ;  in mes_userq_unmap()
  282  userq_props->mqd_gpu_addr = queue->mqd.gpu_addr;  in mes_userq_mqd_create()
  285  userq_props->fence_address = queue->fence_drv->gpu_addr;  in mes_userq_mqd_create()
  441  fence_gpu_addr = adev->wb.gpu_addr + (fence_offset * 4);  in mes_userq_preempt()
  446  queue_input.gang_context_addr = ctx->gpu_addr + AMDGPU_USERQ_PROC_CTX_SZ;  in mes_userq_preempt()
  [all …]
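The mes_userqueue.c matches imply a fixed layout: the MES process context and gang context share one BO, with the gang context always at ctx->gpu_addr + AMDGPU_USERQ_PROC_CTX_SZ. A small model of that layout; the size constant here is a hypothetical stand-in for AMDGPU_USERQ_PROC_CTX_SZ:

    #include <stdint.h>
    #include <stdio.h>

    #define PROC_CTX_SZ 0x1000u  /* hypothetical stand-in */

    int main(void)
    {
        uint64_t ctx_gpu_addr = 0x800000000ULL;  /* hypothetical context BO */

        printf("process ctx at 0x%llx, gang ctx at 0x%llx\n",
               (unsigned long long)ctx_gpu_addr,
               (unsigned long long)(ctx_gpu_addr + PROC_CTX_SZ));
        return 0;
    }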
|
vcn_v5_0_1.c
  441  lower_32_bits(adev->vcn.inst[inst].gpu_addr));  in vcn_v5_0_1_mc_resume()
  443  upper_32_bits(adev->vcn.inst[inst].gpu_addr));  in vcn_v5_0_1_mc_resume()
  452  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset));  in vcn_v5_0_1_mc_resume()
  454  upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset));  in vcn_v5_0_1_mc_resume()
  460  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v5_0_1_mc_resume()
  462  upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v5_0_1_mc_resume()
  468  lower_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));  in vcn_v5_0_1_mc_resume()
  470  upper_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));  in vcn_v5_0_1_mc_resume()
  520  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v5_0_1_mc_resume_dpg_mode()
  523  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v5_0_1_mc_resume_dpg_mode()
  [all …]
|
amdgpu_sa.c
   54  &sa_manager->bo, &sa_manager->gpu_addr,  in amdgpu_sa_bo_manager_init()
   76  amdgpu_bo_free_kernel(&sa_manager->bo, &sa_manager->gpu_addr, &sa_manager->cpu_ptr);  in amdgpu_sa_bo_manager_fini()
  113  drm_suballoc_dump_debug_info(&sa_manager->base, &p, sa_manager->gpu_addr);  in amdgpu_sa_bo_dump_debug_info()
|
vcn_v3_0.c
  537  lower_32_bits(adev->vcn.inst[inst].gpu_addr));  in vcn_v3_0_mc_resume()
  539  upper_32_bits(adev->vcn.inst[inst].gpu_addr));  in vcn_v3_0_mc_resume()
  548  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset));  in vcn_v3_0_mc_resume()
  550  upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset));  in vcn_v3_0_mc_resume()
  556  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v3_0_mc_resume()
  558  upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v3_0_mc_resume()
  564  lower_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));  in vcn_v3_0_mc_resume()
  566  upper_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));  in vcn_v3_0_mc_resume()
  603  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v3_0_mc_resume_dpg_mode()
  606  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v3_0_mc_resume_dpg_mode()
  [all …]
|
vcn_v4_0.c
  467  lower_32_bits(adev->vcn.inst[inst].gpu_addr));  in vcn_v4_0_mc_resume()
  469  upper_32_bits(adev->vcn.inst[inst].gpu_addr));  in vcn_v4_0_mc_resume()
  477  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset));  in vcn_v4_0_mc_resume()
  479  upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset));  in vcn_v4_0_mc_resume()
  485  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v4_0_mc_resume()
  487  upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v4_0_mc_resume()
  493  lower_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));  in vcn_v4_0_mc_resume()
  495  upper_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));  in vcn_v4_0_mc_resume()
  542  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v4_0_mc_resume_dpg_mode()
  545  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v4_0_mc_resume_dpg_mode()
  [all …]
|
vcn_v4_0_3.c
  477  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr));  in vcn_v4_0_3_mc_resume()
  480  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr));  in vcn_v4_0_3_mc_resume()
  489  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset));  in vcn_v4_0_3_mc_resume()
  491  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset));  in vcn_v4_0_3_mc_resume()
  498  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset +  in vcn_v4_0_3_mc_resume()
  501  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr + offset +  in vcn_v4_0_3_mc_resume()
  510  lower_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr));  in vcn_v4_0_3_mc_resume()
  513  upper_32_bits(adev->vcn.inst[inst_idx].fw_shared.gpu_addr));  in vcn_v4_0_3_mc_resume()
  564  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v4_0_3_mc_resume_dpg_mode()
  567  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v4_0_3_mc_resume_dpg_mode()
  [all …]
|
vcn_v4_0_5.c
  418  lower_32_bits(adev->vcn.inst[inst].gpu_addr));  in vcn_v4_0_5_mc_resume()
  420  upper_32_bits(adev->vcn.inst[inst].gpu_addr));  in vcn_v4_0_5_mc_resume()
  428  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset));  in vcn_v4_0_5_mc_resume()
  430  upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset));  in vcn_v4_0_5_mc_resume()
  436  lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v4_0_5_mc_resume()
  438  upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE));  in vcn_v4_0_5_mc_resume()
  444  lower_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));  in vcn_v4_0_5_mc_resume()
  446  upper_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr));  in vcn_v4_0_5_mc_resume()
  496  lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v4_0_5_mc_resume_dpg_mode()
  499  upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect);  in vcn_v4_0_5_mc_resume_dpg_mode()
  [all …]
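The VCN mc_resume variants above (vcn_v3_0.c through vcn_v4_0_5.c, plus vcn_v5_0_1.c earlier) all follow one pattern: each memory region, the firmware image at the BO base, further regions at offset and offset + AMDGPU_VCN_STACK_SIZE, and the fw_shared area, is programmed as a lower/upper 32-bit register pair. A plain-C model of that pattern with a stubbed register write; register numbers and sizes are hypothetical stand-ins:

    #include <stdint.h>
    #include <stdio.h>

    static void wreg32(uint32_t reg, uint32_t val)  /* MMIO write stand-in */
    {
        printf("reg 0x%03x <- 0x%08x\n", reg, val);
    }

    /* One region = one lo/hi register pair, as in the matches above. */
    static void program_region(uint32_t lo_reg, uint32_t hi_reg, uint64_t addr)
    {
        wreg32(lo_reg, (uint32_t)addr);          /* lower_32_bits(addr) */
        wreg32(hi_reg, (uint32_t)(addr >> 32));  /* upper_32_bits(addr) */
    }

    int main(void)
    {
        uint64_t inst_addr  = 0x812340000ULL;  /* hypothetical VCN instance BO */
        uint64_t offset     = 0x40000;         /* hypothetical firmware size   */
        uint64_t stack_size = 0x10000;         /* hypothetical stack size      */

        program_region(0x100, 0x104, inst_addr);                       /* firmware */
        program_region(0x108, 0x10c, inst_addr + offset);              /* stack    */
        program_region(0x110, 0x114, inst_addr + offset + stack_size); /* next     */
        return 0;
    }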
|
vcn_sw_ring.c
   51  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));  in vcn_dec_sw_ring_emit_ib()
   52  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));  in vcn_dec_sw_ring_emit_ib()
|
amdgpu_fence.c
  119  amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr,  in amdgpu_fence_emit()
  176  amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr,  in amdgpu_fence_emit_polling()
  424  ring->fence_drv.gpu_addr = ring->fence_gpu_addr;  in amdgpu_fence_driver_start_ring()
  429  ring->fence_drv.gpu_addr = adev->uvd.inst[ring->me].gpu_addr + index;  in amdgpu_fence_driver_start_ring()
  438  ring->name, ring->fence_drv.gpu_addr);  in amdgpu_fence_driver_start_ring()
  462  ring->fence_drv.gpu_addr = 0;  in amdgpu_fence_driver_init_ring()
|
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_mqd_manager.c
   58  mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr;  in allocate_hiq_mqd()
   84  mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr + offset;  in allocate_sdma_mqd()
  285  mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr + offset;  in kfd_get_hiq_xcc_mqd()
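kfd_mqd_manager.c shows MQDs being carved out of one pre-allocated hiq_sdma_mqd buffer rather than allocated individually: each descriptor's gpu_addr is the shared base plus a per-queue byte offset. A minimal model of that carve-out; the stride constant is hypothetical:

    #include <stdint.h>
    #include <stdio.h>

    #define MQD_STRIDE 0x200u  /* hypothetical per-MQD size */

    /* Slice N of the shared buffer: base + index * stride. */
    static uint64_t mqd_gpu_addr(uint64_t hiq_sdma_base, unsigned int index)
    {
        return hiq_sdma_base + (uint64_t)index * MQD_STRIDE;
    }

    int main(void)
    {
        printf("sdma mqd #3 at 0x%llx\n",
               (unsigned long long)mqd_gpu_addr(0x40000000ULL, 3));
        return 0;
    }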
|
/linux/include/drm/amd/
isp.h
   47  void **buf_obj, u64 *gpu_addr, void **cpu_addr);
   49  void isp_kernel_buffer_free(void **buf_obj, u64 *gpu_addr, void **cpu_addr);
|