
Searched refs:xcp_id (Results 1 – 21 of 21) sorted by relevance

/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_xcp.h
133 int (*get_ip_details)(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id,
141 int (*prepare_suspend)(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
142 int (*suspend)(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
143 int (*prepare_resume)(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
144 int (*resume)(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
151 int amdgpu_xcp_prepare_suspend(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
152 int amdgpu_xcp_suspend(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
153 int amdgpu_xcp_prepare_resume(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
154 int amdgpu_xcp_resume(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
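
Note: the callbacks and wrappers above form a per-partition suspend/resume path keyed by xcp_id. Below is a minimal sketch of a caller walking every partition, assuming only what the listing shows (an amdgpu_xcp_mgr carrying a num_xcps count, as seen in aqua_vanjaram.c and amdgpu_kms.c); the helper name and error handling are illustrative, not from the driver.

        /* Illustrative only: quiesce each partition in turn, aborting on error. */
        static int example_suspend_all_xcps(struct amdgpu_xcp_mgr *xcp_mgr)
        {
                int xcp_id, r;

                for (xcp_id = 0; xcp_id < xcp_mgr->num_xcps; xcp_id++) {
                        r = amdgpu_xcp_prepare_suspend(xcp_mgr, xcp_id);
                        if (r)
                                return r;
                        r = amdgpu_xcp_suspend(xcp_mgr, xcp_id);
                        if (r)
                                return r;
                }

                return 0;
        }
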
aqua_vanjaram.c
33 #define XCP_INST_MASK(num_inst, xcp_id) \ argument
34 (num_inst ? GENMASK(num_inst - 1, 0) << (xcp_id * num_inst) : 0)
73 int xcp_id; in aqua_vanjaram_set_xcp_id() local
77 ring->xcp_id = AMDGPU_XCP_NO_PARTITION; in aqua_vanjaram_set_xcp_id()
79 adev->gfx.enforce_isolation[0].xcp_id = ring->xcp_id; in aqua_vanjaram_set_xcp_id()
103 for (xcp_id = 0; xcp_id < adev->xcp_mgr->num_xcps; xcp_id++) { in aqua_vanjaram_set_xcp_id()
104 if (adev->xcp_mgr->xcp[xcp_id].ip[ip_blk].inst_mask & inst_mask) { in aqua_vanjaram_set_xcp_id()
105 ring->xcp_id = xcp_id; in aqua_vanjaram_set_xcp_id()
107 ring->xcp_id); in aqua_vanjaram_set_xcp_id()
109 adev->gfx.enforce_isolation[xcp_id].xcp_id = xcp_id; in aqua_vanjaram_set_xcp_id()
[all …]
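
Note: XCP_INST_MASK builds a contiguous instance mask for one partition by shifting GENMASK(num_inst - 1, 0) up by xcp_id * num_inst. A worked example with illustrative values:

        /* With num_inst = 2 instances per partition:
         *   XCP_INST_MASK(2, 0) == GENMASK(1, 0) << 0 == 0x3   (instances 0-1)
         *   XCP_INST_MASK(2, 1) == GENMASK(1, 0) << 2 == 0xc   (instances 2-3)
         *   XCP_INST_MASK(2, 3) == GENMASK(1, 0) << 6 == 0xc0  (instances 6-7)
         * The "num_inst ? ... : 0" guard keeps a zero-instance partition from
         * evaluating GENMASK(-1, 0).
         */
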
amdgpu_amdkfd.h
256 uint32_t *flags, int8_t *xcp_id);
269 int amdgpu_amdkfd_config_sq_perfmon(struct amdgpu_device *adev, uint32_t xcp_id,
312 uint8_t xcp_id);
358 uint64_t size, u32 alloc_flag, int8_t xcp_id);
360 uint64_t size, u32 alloc_flag, int8_t xcp_id);
362 u64 amdgpu_amdkfd_xcp_memory_size(struct amdgpu_device *adev, int xcp_id);
364 #define KFD_XCP_MEM_ID(adev, xcp_id) \ argument
365 ((adev)->xcp_mgr && (xcp_id) >= 0 ?\
366 (adev)->xcp_mgr->xcp[(xcp_id)].mem_id : -1)
368 #define KFD_XCP_MEMORY_SIZE(adev, xcp_id) amdgpu_amdkfd_xcp_memory_size((adev), (xcp_id)) argument
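
Note: KFD_XCP_MEM_ID resolves an xcp_id to the memory partition backing it and yields -1 when there is no xcp_mgr or the id is negative, while KFD_XCP_MEMORY_SIZE forwards to amdgpu_amdkfd_xcp_memory_size(). A hedged usage sketch; the helper name is invented for illustration and only the macros shown above are assumed.

        /* Illustrative only: report a partition's memory id and size, guarding
         * against the -1 sentinel returned for unpartitioned devices. */
        static void example_report_xcp_memory(struct amdgpu_device *adev, int xcp_id)
        {
                s8 mem_id = KFD_XCP_MEM_ID(adev, xcp_id);

                if (mem_id < 0)
                        return; /* no XCP manager, or xcp_id < 0 */

                dev_info(adev->dev, "xcp %d -> mem_id %d, %llu bytes\n",
                         xcp_id, mem_id, KFD_XCP_MEMORY_SIZE(adev, xcp_id));
        }
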
amdgpu_amdkfd_gpuvm.c
170 uint64_t size, u32 alloc_flag, int8_t xcp_id) in amdgpu_amdkfd_reserve_mem_limit() argument
196 if (WARN_ONCE(xcp_id < 0, "invalid XCP ID %d", xcp_id)) in amdgpu_amdkfd_reserve_mem_limit()
199 vram_size = KFD_XCP_MEMORY_SIZE(adev, xcp_id); in amdgpu_amdkfd_reserve_mem_limit()
223 (adev && xcp_id >= 0 && adev->kfd.vram_used[xcp_id] + vram_needed > in amdgpu_amdkfd_reserve_mem_limit()
234 if (adev && xcp_id >= 0) { in amdgpu_amdkfd_reserve_mem_limit()
235 adev->kfd.vram_used[xcp_id] += vram_needed; in amdgpu_amdkfd_reserve_mem_limit()
236 adev->kfd.vram_used_aligned[xcp_id] += in amdgpu_amdkfd_reserve_mem_limit()
250 uint64_t size, u32 alloc_flag, int8_t xcp_id) in amdgpu_amdkfd_unreserve_mem_limit() argument
260 if (WARN_ONCE(xcp_id < 0, "invalid XCP ID %d", xcp_id)) in amdgpu_amdkfd_unreserve_mem_limit()
264 adev->kfd.vram_used[xcp_id] -= size; in amdgpu_amdkfd_unreserve_mem_limit()
[all …]
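
Note: the two functions above keep per-partition VRAM accounting in adev->kfd.vram_used[xcp_id]; a successful reserve is expected to be balanced by an unreserve with the same size, alloc_flag, and xcp_id. A sketch of that pairing follows; the leading adev parameter and the return types are assumptions, since both are truncated in the listing.

        /* Illustrative only: the accounting calls that would bracket the lifetime
         * of one allocation on partition xcp_id (return types assumed). */
        static int example_account_alloc(struct amdgpu_device *adev, uint64_t size,
                                         u32 alloc_flag, int8_t xcp_id)
        {
                return amdgpu_amdkfd_reserve_mem_limit(adev, size, alloc_flag, xcp_id);
        }

        static void example_account_free(struct amdgpu_device *adev, uint64_t size,
                                         u32 alloc_flag, int8_t xcp_id)
        {
                /* must mirror the reserve call exactly so vram_used[xcp_id] balances */
                amdgpu_amdkfd_unreserve_mem_limit(adev, size, alloc_flag, xcp_id);
        }
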
amdgpu_amdkfd.c
506 uint32_t *flags, int8_t *xcp_id) in amdgpu_amdkfd_get_dmabuf_info() argument
550 if (xcp_id) in amdgpu_amdkfd_get_dmabuf_info()
551 *xcp_id = bo->xcp_id; in amdgpu_amdkfd_get_dmabuf_info()
797 u64 amdgpu_amdkfd_xcp_memory_size(struct amdgpu_device *adev, int xcp_id) in amdgpu_amdkfd_xcp_memory_size() argument
799 s8 mem_id = KFD_XCP_MEM_ID(adev, xcp_id); in amdgpu_amdkfd_xcp_memory_size()
802 if (adev->gmc.num_mem_partitions && xcp_id >= 0 && mem_id >= 0) { in amdgpu_amdkfd_xcp_memory_size()
912 int amdgpu_amdkfd_config_sq_perfmon(struct amdgpu_device *adev, uint32_t xcp_id, in amdgpu_amdkfd_config_sq_perfmon() argument
920 r = psp_config_sq_perfmon(&adev->psp, xcp_id, core_override_enable, in amdgpu_amdkfd_config_sq_perfmon()
amdgpu_gfx.c
1456 static int amdgpu_gfx_run_cleaner_shader(struct amdgpu_device *adev, int xcp_id) in amdgpu_gfx_run_cleaner_shader() argument
1471 if ((ring->xcp_id == xcp_id) && ring->sched.ready) { in amdgpu_gfx_run_cleaner_shader()
1910 if (isolation_work->xcp_id == AMDGPU_XCP_NO_PARTITION) in amdgpu_gfx_enforce_isolation_handler()
1913 idx = isolation_work->xcp_id; in amdgpu_gfx_enforce_isolation_handler()
1920 if (isolation_work->xcp_id == adev->gfx.gfx_ring[i].xcp_id) in amdgpu_gfx_enforce_isolation_handler()
1924 if (isolation_work->xcp_id == adev->gfx.compute_ring[i].xcp_id) in amdgpu_gfx_enforce_isolation_handler()
1996 if (ring->xcp_id == AMDGPU_XCP_NO_PARTITION) in amdgpu_gfx_enforce_isolation_ring_begin_use()
1999 idx = ring->xcp_id; in amdgpu_gfx_enforce_isolation_ring_begin_use()
2023 if (ring->xcp_id == AMDGPU_XCP_NO_PARTITION) in amdgpu_gfx_enforce_isolation_ring_end_use()
2026 idx = ring->xcp_id; in amdgpu_gfx_enforce_isolation_ring_end_use()
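
Note: every path above follows the same idiom: a ring (or isolation work item) whose xcp_id is AMDGPU_XCP_NO_PARTITION is skipped, and any other value is used directly as the index into per-partition state such as adev->gfx.enforce_isolation[]. A minimal sketch of that guard, with an invented helper name:

        /* Illustrative only: map a ring to the per-partition index used for
         * adev->gfx.enforce_isolation[]; -1 means "not bound to a partition". */
        static int example_ring_isolation_idx(struct amdgpu_ring *ring)
        {
                if (ring->xcp_id == AMDGPU_XCP_NO_PARTITION)
                        return -1;

                return ring->xcp_id;
        }
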
amdgpu_vm_pt.c
440 int32_t xcp_id) in amdgpu_vm_pt_create() argument
471 bp.xcp_id_plus1 = xcp_id + 1; in amdgpu_vm_pt_create()
508 vm->root.bo->xcp_id); in amdgpu_vm_pt_alloc()
amdgpu_ids.c
487 (adev->enforce_isolation[(vm->root.bo->xcp_id != AMDGPU_XCP_NO_PARTITION) ? in amdgpu_vmid_uses_reserved()
488 vm->root.bo->xcp_id : 0] && in amdgpu_vmid_uses_reserved()
amdgpu_vm.h
489 int amdgpu_vm_init(struct amdgpu_device *adev, struct amdgpu_vm *vm, int32_t xcp_id);
586 int32_t xcp_id);
amdgpu_gem.c
376 flags, ttm_bo_type_device, resv, &gobj, fpriv->xcp_id + 1); in amdgpu_gem_create_ioctl()
446 0, ttm_bo_type_device, NULL, &gobj, fpriv->xcp_id + 1); in amdgpu_gem_userptr_ioctl()
971 ttm_bo_type_device, NULL, &gobj, fpriv->xcp_id + 1); in amdgpu_mode_dumb_create()
amdgpu_object.h
125 int8_t xcp_id; member
amdgpu_object.c
118 int8_t mem_id = KFD_XCP_MEM_ID(adev, abo->xcp_id); in amdgpu_bo_placement_from_domain()
583 bo->xcp_id = bp->xcp_id_plus1 - 1; in amdgpu_bo_create()
586 bo->xcp_id = 0; in amdgpu_bo_create()
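
Note: taken together with the amdgpu_vm_pt.c and amdgpu_gem.c snippets above, these two lines show the +1/-1 encoding used to carry a partition through buffer creation: callers store xcp_id + 1 in xcp_id_plus1 so that 0 is left free to mean "no explicit partition". A worked reading (the branch conditions around lines 583/586 are not visible in the listing):

        /*
         * Encode: a file/VM with xcp_id == 2 passes xcp_id_plus1 == 3;
         *         passing 0 signals no explicit partition.
         * Decode: bo->xcp_id = xcp_id_plus1 - 1 recovers 2, while the 0 case
         *         decodes to -1; the other branch (line 586) pins bo->xcp_id to 0.
         */
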
amdgpu_psp.h
563 int psp_config_sq_perfmon(struct psp_context *psp, uint32_t xcp_id,
amdgpu_gfx.h
355 u32 xcp_id; member
amdgpu_kms.c
622 fpriv->xcp_id < adev->xcp_mgr->num_xcps) { in amdgpu_info_ioctl()
623 xcp = &adev->xcp_mgr->xcp[fpriv->xcp_id]; in amdgpu_info_ioctl()
1337 r = amdgpu_vm_init(adev, &fpriv->vm, fpriv->xcp_id); in amdgpu_driver_open_kms()
amdgpu_ring.h
263 u32 xcp_id; member
amdgpu_ttm.c
1111 if (adev->gmc.mem_partitions && abo->xcp_id >= 0) in amdgpu_ttm_tt_create()
1112 gtt->pool_id = KFD_XCP_MEM_ID(adev, abo->xcp_id); in amdgpu_ttm_tt_create()
1114 gtt->pool_id = abo->xcp_id; in amdgpu_ttm_tt_create()
amdgpu_vm.c
2425 int32_t xcp_id) in amdgpu_vm_init() argument
2479 false, &root, xcp_id); in amdgpu_vm_init()
gmc_v9_0.c
1206 KFD_XCP_MEM_ID(adev, bo->xcp_id) == vm->mem_id); in gmc_v9_0_get_coherence_flags()
amdgpu_device.c
4266 adev->gfx.enforce_isolation[i].xcp_id = i; in amdgpu_device_init()
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_chardev.c
1502 int8_t xcp_id; in kfd_ioctl_get_dmabuf_info() local
1523 &args->metadata_size, &flags, &xcp_id); in kfd_ioctl_get_dmabuf_info()
1527 if (xcp_id >= 0) in kfd_ioctl_get_dmabuf_info()
1528 args->gpu_id = dmabuf_adev->kfd.dev->nodes[xcp_id]->id; in kfd_ioctl_get_dmabuf_info()