Home
last modified time | relevance | path

Searched refs: gpu_addr (Results 1 – 25 of 91) sorted by relevance

1234

/linux/drivers/gpu/drm/radeon/
H A Dr600_dma.c143 upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF); in r600_dma_resume()
145 ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC)); in r600_dma_resume()
150 WREG32(DMA_RB_BASE, ring->gpu_addr >> 8); in r600_dma_resume()
236 u64 gpu_addr; in r600_dma_ring_test() local
243 gpu_addr = rdev->wb.gpu_addr + index; in r600_dma_ring_test()
254 radeon_ring_write(ring, lower_32_bits(gpu_addr)); in r600_dma_ring_test()
255 radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff); in r600_dma_ring_test()
290 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in r600_dma_fence_ring_emit()
317 u64 addr = semaphore->gpu_addr; in r600_dma_semaphore_ring_emit()
343 u64 gpu_addr; in r600_dma_ib_test() local
[all …]
H A Duvd_v4_2.c47 addr = (rdev->uvd.gpu_addr + 0x200) >> 3; in uvd_v4_2_resume()
49 addr = rdev->uvd.gpu_addr >> 3; in uvd_v4_2_resume()
67 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v4_2_resume()
71 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v4_2_resume()
H A Dcik_sdma.c154 radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_ib_execute()
155 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr)); in cik_sdma_ring_ib_execute()
203 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cik_sdma_fence_ring_emit()
232 u64 addr = semaphore->gpu_addr; in cik_sdma_semaphore_ring_emit()
400 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in cik_sdma_gfx_resume()
402 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
407 WREG32(SDMA0_GFX_RB_BASE + reg_offset, ring->gpu_addr >> 8); in cik_sdma_gfx_resume()
408 WREG32(SDMA0_GFX_RB_BASE_HI + reg_offset, ring->gpu_addr >> 40); in cik_sdma_gfx_resume()
651 u64 gpu_addr; in cik_sdma_ring_test() local
658 gpu_addr = rdev->wb.gpu_addr + index; in cik_sdma_ring_test()
[all …]
H A Duvd_v2_2.c43 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v2_2_fence_emit()
77 uint64_t addr = semaphore->gpu_addr; in uvd_v2_2_semaphore_emit()
113 addr = rdev->uvd.gpu_addr >> 3; in uvd_v2_2_resume()
130 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v2_2_resume()
134 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v2_2_resume()
H A Dradeon_semaphore.c51 (*semaphore)->gpu_addr = radeon_sa_bo_gpu_addr((*semaphore)->sa_bo); in radeon_semaphore_create()
69 ring->last_semaphore_signal_addr = semaphore->gpu_addr; in radeon_semaphore_emit_signal()
86 ring->last_semaphore_wait_addr = semaphore->gpu_addr; in radeon_semaphore_emit_wait()
H A Dvce_v1_0.c218 uint64_t addr = rdev->vce.gpu_addr; in vce_v1_0_resume()
300 WREG32(VCE_RB_BASE_LO, ring->gpu_addr); in vce_v1_0_start()
301 WREG32(VCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in vce_v1_0_start()
307 WREG32(VCE_RB_BASE_LO2, ring->gpu_addr); in vce_v1_0_start()
308 WREG32(VCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in vce_v1_0_start()
H A Dradeon_trace.h177 __field(uint64_t, gpu_addr)
183 __entry->gpu_addr = sem->gpu_addr;
187 __entry->waiters, __entry->gpu_addr)
H A Devergreen_dma.c44 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in evergreen_dma_fence_ring_emit()
88 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in evergreen_dma_ring_ib_execute()
89 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in evergreen_dma_ring_ib_execute()
H A Dni_dma.c144 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in cayman_dma_ring_ib_execute()
145 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
222 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF); in cayman_dma_resume()
224 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cayman_dma_resume()
229 WREG32(DMA_RB_BASE + reg_offset, ring->gpu_addr >> 8); in cayman_dma_resume()
H A Dradeon_fence.c770 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + in radeon_fence_driver_start_ring()
777 rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index; in radeon_fence_driver_start_ring()
790 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index; in radeon_fence_driver_start_ring()
795 ring, rdev->fence_drv[ring].gpu_addr); in radeon_fence_driver_start_ring()
815 rdev->fence_drv[ring].gpu_addr = 0; in radeon_fence_driver_init_ring()
H A Dradeon_object.h146 extern int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr);
148 u64 max_offset, u64 *gpu_addr);
180 return to_radeon_sa_manager(sa_bo->manager)->gpu_addr + in radeon_sa_bo_gpu_addr()
H A Dradeon_object.c273 u64 *gpu_addr) in radeon_bo_pin_restricted() argument
283 if (gpu_addr) in radeon_bo_pin_restricted()
284 *gpu_addr = radeon_bo_gpu_offset(bo); in radeon_bo_pin_restricted()
319 if (gpu_addr != NULL) in radeon_bo_pin_restricted()
320 *gpu_addr = radeon_bo_gpu_offset(bo); in radeon_bo_pin_restricted()
331 int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr) in radeon_bo_pin() argument
333 return radeon_bo_pin_restricted(bo, domain, 0, gpu_addr); in radeon_bo_pin()
H A Dradeon_sa.c91 r = radeon_bo_pin(sa_manager->bo, sa_manager->domain, &sa_manager->gpu_addr); in radeon_sa_bo_manager_start()
158 drm_suballoc_dump_debug_info(&sa_manager->base, &p, sa_manager->gpu_addr); in radeon_sa_bo_dump_debug_info()
/linux/drivers/gpu/drm/amd/amdgpu/
H A Damdgpu_isp.c180 u64 gpu_addr; in isp_user_buffer_alloc() local
204 AMDGPU_GEM_DOMAIN_GTT, &bo, &gpu_addr); in isp_user_buffer_alloc()
211 *buf_addr = gpu_addr; in isp_user_buffer_alloc()
251 void **buf_obj, u64 *gpu_addr, void **cpu_addr) in isp_kernel_buffer_alloc() argument
266 if (WARN_ON(!gpu_addr)) in isp_kernel_buffer_alloc()
290 gpu_addr, in isp_kernel_buffer_alloc()
313 void isp_kernel_buffer_free(void **buf_obj, u64 *gpu_addr, void **cpu_addr) in isp_kernel_buffer_free() argument
317 amdgpu_bo_free_kernel(bo, gpu_addr, cpu_addr); in isp_kernel_buffer_free()
H A Dmes_userqueue.c110 queue->wptr_obj.gpu_addr = amdgpu_bo_gpu_offset(wptr_obj->obj); in mes_userq_create_wptr_mapping()
155 queue_input.process_context_addr = ctx->gpu_addr; in mes_userq_map()
156 queue_input.gang_context_addr = ctx->gpu_addr + AMDGPU_USERQ_PROC_CTX_SZ; in mes_userq_map()
162 queue_input.mqd_addr = queue->mqd.gpu_addr; in mes_userq_map()
167 queue_input.wptr_mc_addr = queue->wptr_obj.gpu_addr; in mes_userq_map()
191 queue_input.gang_context_addr = ctx->gpu_addr + AMDGPU_USERQ_PROC_CTX_SZ; in mes_userq_unmap()
304 userq_props->mqd_gpu_addr = queue->mqd.gpu_addr; in mes_userq_mqd_create()
307 userq_props->fence_address = queue->fence_drv->gpu_addr; in mes_userq_mqd_create()
463 fence_gpu_addr = adev->wb.gpu_addr + (fence_offset * 4); in mes_userq_preempt()
468 queue_input.gang_context_addr = ctx->gpu_addr in mes_userq_preempt()
[all …]
H A Dvcn_v2_5.c609 lower_32_bits(adev->vcn.inst[i].gpu_addr)); in vcn_v2_5_mc_resume()
611 upper_32_bits(adev->vcn.inst[i].gpu_addr)); in vcn_v2_5_mc_resume()
620 lower_32_bits(adev->vcn.inst[i].gpu_addr + offset)); in vcn_v2_5_mc_resume()
622 upper_32_bits(adev->vcn.inst[i].gpu_addr + offset)); in vcn_v2_5_mc_resume()
628 lower_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_5_mc_resume()
630 upper_32_bits(adev->vcn.inst[i].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v2_5_mc_resume()
636 lower_32_bits(adev->vcn.inst[i].fw_shared.gpu_addr)); in vcn_v2_5_mc_resume()
638 upper_32_bits(adev->vcn.inst[i].fw_shared.gpu_addr)); in vcn_v2_5_mc_resume()
675 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
678 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
[all …]
H A Damdgpu_sa.c54 &sa_manager->bo, &sa_manager->gpu_addr, in amdgpu_sa_bo_manager_init()
76 amdgpu_bo_free_kernel(&sa_manager->bo, &sa_manager->gpu_addr, &sa_manager->cpu_ptr); in amdgpu_sa_bo_manager_fini()
113 drm_suballoc_dump_debug_info(&sa_manager->base, &p, sa_manager->gpu_addr); in amdgpu_sa_bo_dump_debug_info()
H A Dvcn_v4_0_5.c418 lower_32_bits(adev->vcn.inst[inst].gpu_addr)); in vcn_v4_0_5_mc_resume()
420 upper_32_bits(adev->vcn.inst[inst].gpu_addr)); in vcn_v4_0_5_mc_resume()
428 lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset)); in vcn_v4_0_5_mc_resume()
430 upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset)); in vcn_v4_0_5_mc_resume()
436 lower_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v4_0_5_mc_resume()
438 upper_32_bits(adev->vcn.inst[inst].gpu_addr + offset + AMDGPU_VCN_STACK_SIZE)); in vcn_v4_0_5_mc_resume()
444 lower_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr)); in vcn_v4_0_5_mc_resume()
446 upper_32_bits(adev->vcn.inst[inst].fw_shared.gpu_addr)); in vcn_v4_0_5_mc_resume()
496 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
499 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr), 0, indirect); in vcn_v4_0_5_mc_resume_dpg_mode()
[all …]
H A Dvcn_sw_ring.c51 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in vcn_dec_sw_ring_emit_ib()
52 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in vcn_dec_sw_ring_emit_ib()
H A Damdgpu_fence.c130 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit()
188 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit_polling()
436 ring->fence_drv.gpu_addr = ring->fence_gpu_addr; in amdgpu_fence_driver_start_ring()
441 ring->fence_drv.gpu_addr = adev->uvd.inst[ring->me].gpu_addr + index; in amdgpu_fence_driver_start_ring()
450 ring->name, ring->fence_drv.gpu_addr); in amdgpu_fence_driver_start_ring()
474 ring->fence_drv.gpu_addr = 0; in amdgpu_fence_driver_init_ring()
H A Damdgpu_cgs.c209 uint64_t gpu_addr; in amdgpu_cgs_get_firmware_info() local
223 gpu_addr = ucode->mc_addr; in amdgpu_cgs_get_firmware_info()
229 gpu_addr += ALIGN(le32_to_cpu(header->header.ucode_size_bytes), PAGE_SIZE); in amdgpu_cgs_get_firmware_info()
235 info->mc_addr = gpu_addr; in amdgpu_cgs_get_firmware_info()
H A Dsdma_v6_0.c286 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v6_0_ring_emit_ib()
287 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v6_0_ring_emit_ib()
538 …WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_BASE), ring->gpu_addr >> … in sdma_v6_0_gfx_resume_instance()
539 …WREG32_SOC15_IP(GC, sdma_v6_0_get_reg_offset(adev, i, regSDMA0_QUEUE0_RB_BASE_HI), ring->gpu_addr in sdma_v6_0_gfx_resume_instance()
926 u64 gpu_addr; in sdma_v6_0_ring_test_ring() local
936 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v6_0_ring_test_ring()
948 amdgpu_ring_write(ring, lower_32_bits(gpu_addr)); in sdma_v6_0_ring_test_ring()
949 amdgpu_ring_write(ring, upper_32_bits(gpu_addr)); in sdma_v6_0_ring_test_ring()
989 u64 gpu_addr; in sdma_v6_0_ring_test_ib() local
1000 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v6_0_ring_test_ib()
[all …]
/linux/drivers/gpu/drm/amd/amdkfd/
H A Dkfd_mqd_manager.c59 mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr; in allocate_hiq_mqd()
86 mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr + offset; in allocate_sdma_mqd()
287 mqd_mem_obj->gpu_addr = dev->dqm->hiq_sdma_mqd.gpu_addr + offset; in kfd_get_hiq_xcc_mqd()
H A Dkfd_kernel_queue.c79 kq->pq_gpu_addr = kq->pq->gpu_addr; in kq_initialize()
87 kq->eop_gpu_addr = kq->eop_mem->gpu_addr; in kq_initialize()
100 kq->rptr_gpu_addr = kq->rptr_mem->gpu_addr; in kq_initialize()
109 kq->wptr_gpu_addr = kq->wptr_mem->gpu_addr; in kq_initialize()
/linux/include/drm/amd/
H A Disp.h47 void **buf_obj, u64 *gpu_addr, void **cpu_addr);
49 void isp_kernel_buffer_free(void **buf_obj, u64 *gpu_addr, void **cpu_addr);

1234