
Searched refs:bo_va (Results 1 – 25 of 28) sorted by relevance


/linux/drivers/gpu/drm/radeon/
radeon_vm.c
295 struct radeon_bo_va *bo_va; in radeon_vm_bo_find() local
297 list_for_each_entry(bo_va, &bo->va, bo_list) { in radeon_vm_bo_find()
298 if (bo_va->vm == vm) in radeon_vm_bo_find()
299 return bo_va; in radeon_vm_bo_find()
322 struct radeon_bo_va *bo_va; in radeon_vm_bo_add() local
324 bo_va = kzalloc(sizeof(struct radeon_bo_va), GFP_KERNEL); in radeon_vm_bo_add()
325 if (bo_va == NULL) in radeon_vm_bo_add()
328 bo_va->vm = vm; in radeon_vm_bo_add()
329 bo_va->bo = bo; in radeon_vm_bo_add()
330 bo_va->it.start = 0; in radeon_vm_bo_add()
[all …]
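
The radeon_vm.c hits above show the per-BO mapping list that most of these drivers share: radeon_vm_bo_find() walks the buffer object's bo->va list for an entry whose vm matches, and radeon_vm_bo_add() kzalloc()s and initializes a fresh radeon_bo_va when none exists yet. A minimal user-space sketch of that lookup-then-create pattern, assuming a plain singly linked list and hypothetical type names in place of the kernel's list_head machinery:

#include <stdlib.h>

/* Hypothetical stand-ins for struct radeon_vm and struct radeon_bo; the real
 * driver chains struct radeon_bo_va onto bo->va with list_head. */
struct vm { int id; };

struct bo_va {
	struct vm *vm;            /* VM this mapping belongs to */
	unsigned long it_start;   /* interval start, 0 until actually mapped */
	struct bo_va *next;       /* next mapping of the same BO */
};

struct bo { struct bo_va *va; };

/* Walk the BO's mapping list for an entry bound to @vm (cf. radeon_vm_bo_find()). */
static struct bo_va *bo_va_find(struct bo *bo, struct vm *vm)
{
	struct bo_va *bo_va;

	for (bo_va = bo->va; bo_va; bo_va = bo_va->next)
		if (bo_va->vm == vm)
			return bo_va;
	return NULL;
}

/* Allocate a zeroed mapping and link it onto the BO (cf. radeon_vm_bo_add()). */
static struct bo_va *bo_va_add(struct bo *bo, struct vm *vm)
{
	struct bo_va *bo_va = calloc(1, sizeof(*bo_va));

	if (!bo_va)
		return NULL;
	bo_va->vm = vm;
	bo_va->it_start = 0;
	bo_va->next = bo->va;
	bo->va = bo_va;
	return bo_va;
}
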
radeon_gem.c
202 struct radeon_bo_va *bo_va; in radeon_gem_object_open() local
215 bo_va = radeon_vm_bo_find(vm, rbo); in radeon_gem_object_open()
216 if (!bo_va) { in radeon_gem_object_open()
217 bo_va = radeon_vm_bo_add(rdev, vm, rbo); in radeon_gem_object_open()
219 ++bo_va->ref_count; in radeon_gem_object_open()
233 struct radeon_bo_va *bo_va; in radeon_gem_object_close() local
247 bo_va = radeon_vm_bo_find(vm, rbo); in radeon_gem_object_close()
248 if (bo_va) { in radeon_gem_object_close()
249 if (--bo_va->ref_count == 0) { in radeon_gem_object_close()
250 radeon_vm_bo_rmv(rdev, bo_va); in radeon_gem_object_close()
[all …]
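
radeon_gem.c layers a lifetime rule on top: each GEM open of the object in a VM takes a bo_va reference (creating the mapping on the first open), and each close drops one, removing the mapping with radeon_vm_bo_rmv() once ref_count reaches zero. A hedged, self-contained sketch of that open/close refcounting, with the find/add step collapsed into a single "existing" parameter and a plain struct instead of the driver's radeon_bo_va:

#include <stdlib.h>

/* Hypothetical per-VM mapping carrying the reference count radeon keeps. */
struct gem_bo_va {
	int ref_count;
	/* vm pointer, VA interval, list linkage omitted for brevity */
};

/* GEM open: the first opener creates the mapping, every opener takes a
 * reference (cf. radeon_gem_object_open()). */
static struct gem_bo_va *gem_object_open(struct gem_bo_va *existing)
{
	struct gem_bo_va *bo_va = existing;

	if (!bo_va) {
		bo_va = calloc(1, sizeof(*bo_va));
		if (!bo_va)
			return NULL;     /* the driver returns -ENOMEM here */
	}
	++bo_va->ref_count;
	return bo_va;
}

/* GEM close: drop the reference; the last closer removes the mapping
 * (cf. radeon_gem_object_close() calling radeon_vm_bo_rmv()). */
static void gem_object_close(struct gem_bo_va *bo_va)
{
	if (bo_va && --bo_va->ref_count == 0)
		free(bo_va);
}
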
radeon_trace.h
66 TP_PROTO(struct radeon_bo_va *bo_va),
67 TP_ARGS(bo_va),
75 __entry->soffset = bo_va->it.start;
76 __entry->eoffset = bo_va->it.last + 1;
77 __entry->flags = bo_va->flags;
radeon_cs.c
507 struct radeon_bo_va *bo_va; in radeon_bo_vm_update_pte() local
532 bo_va = radeon_vm_bo_find(vm, bo); in radeon_bo_vm_update_pte()
533 if (bo_va == NULL) { in radeon_bo_vm_update_pte()
538 r = radeon_vm_bo_update(rdev, bo_va, bo->tbo.resource); in radeon_bo_vm_update_pte()
542 radeon_sync_fence(&p->ib.sync, bo_va->last_pt_update); in radeon_bo_vm_update_pte()
/linux/drivers/gpu/drm/lima/
lima_vm.c
79 struct lima_bo_va *bo_va, *ret = NULL; in lima_vm_bo_find() local
81 list_for_each_entry(bo_va, &bo->va, list) { in lima_vm_bo_find()
82 if (bo_va->vm == vm) { in lima_vm_bo_find()
83 ret = bo_va; in lima_vm_bo_find()
93 struct lima_bo_va *bo_va; in lima_vm_bo_add() local
99 bo_va = lima_vm_bo_find(vm, bo); in lima_vm_bo_add()
100 if (bo_va) { in lima_vm_bo_add()
101 bo_va->ref_count++; in lima_vm_bo_add()
112 bo_va = kzalloc(sizeof(*bo_va), GFP_KERNEL); in lima_vm_bo_add()
113 if (!bo_va) { in lima_vm_bo_add()
[all …]
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm.c
1217 int amdgpu_vm_bo_update(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_update() argument
1220 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_update()
1221 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_update()
1257 if (obj->import_attach && bo_va->is_xgmi) { in amdgpu_vm_bo_update()
1298 last_update = &bo_va->last_pt_update; in amdgpu_vm_bo_update()
1300 if (!clear && bo_va->base.moved) { in amdgpu_vm_bo_update()
1302 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1304 } else if (bo_va->cleared != clear) { in amdgpu_vm_bo_update()
1305 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1308 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_update()
[all …]
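
The amdgpu_vm.c hits show how amdgpu tracks which mappings of a bo_va still have valid page-table entries: when the BO has moved (or the clear state changed), list_splice_init() dumps everything from valids back onto invalids, and the loop over invalids rewrites those PTEs. A simplified user-space sketch of that splice-and-revalidate step, using a hypothetical singly linked list in place of list_head:

#include <stdio.h>

/* Hypothetical mapping node; the kernel keeps struct amdgpu_bo_va_mapping on
 * list_head lists named valids and invalids inside struct amdgpu_bo_va. */
struct mapping {
	unsigned long start;
	struct mapping *next;
};

struct bo_va_lists {
	struct mapping *valids;    /* mappings whose page tables are current */
	struct mapping *invalids;  /* mappings that still need a PTE update */
};

/* Prepend all of @src onto @dst and empty @src (cf. list_splice_init()). */
static void splice_init(struct mapping **dst, struct mapping **src)
{
	struct mapping *m = *src;

	if (!m)
		return;
	while (m->next)
		m = m->next;
	m->next = *dst;
	*dst = *src;
	*src = NULL;
}

/* Sketch of the revalidation step in amdgpu_vm_bo_update(): if the BO moved,
 * every previously valid mapping is spliced back onto invalids and gets its
 * page-table entries rewritten. */
static void bo_va_update(struct bo_va_lists *lists, int bo_moved)
{
	struct mapping *m;

	if (bo_moved)
		splice_init(&lists->invalids, &lists->valids);

	for (m = lists->invalids; m; m = m->next)
		printf("rewrite PTEs for mapping at 0x%lx\n", m->start);
}
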
amdgpu_csa.c
66 struct amdgpu_bo *bo, struct amdgpu_bo_va **bo_va, in amdgpu_map_static_csa() argument
84 *bo_va = amdgpu_vm_bo_add(adev, vm, bo); in amdgpu_map_static_csa()
85 if (!*bo_va) { in amdgpu_map_static_csa()
90 r = amdgpu_vm_bo_map(adev, *bo_va, csa_addr, 0, size, in amdgpu_map_static_csa()
96 amdgpu_vm_bo_del(adev, *bo_va); in amdgpu_map_static_csa()
106 struct amdgpu_bo *bo, struct amdgpu_bo_va *bo_va, in amdgpu_unmap_static_csa() argument
124 r = amdgpu_vm_bo_unmap(adev, bo_va, csa_addr); in amdgpu_unmap_static_csa()
130 amdgpu_vm_bo_del(adev, bo_va); in amdgpu_unmap_static_csa()
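
amdgpu_csa.c illustrates the usual error unwinding around these helpers: the bo_va created by amdgpu_vm_bo_add() is deleted again with amdgpu_vm_bo_del() if the subsequent amdgpu_vm_bo_map() fails. A sketch of that add-then-map-or-unwind shape, with hypothetical vm_bo_add()/vm_bo_map()/vm_bo_del() stand-ins rather than the real amdgpu calls:

#include <errno.h>
#include <stdlib.h>

struct csa_va { unsigned long addr; };

/* Hypothetical stand-ins for amdgpu_vm_bo_add()/_map()/_del(). */
static struct csa_va *vm_bo_add(void)
{
	return calloc(1, sizeof(struct csa_va));
}

static int vm_bo_map(struct csa_va *va, unsigned long addr)
{
	va->addr = addr;
	return 0;
}

static void vm_bo_del(struct csa_va *va)
{
	free(va);
}

/* Map a buffer at a fixed address: create the per-VM mapping object, then
 * map it; if the map step fails, tear the mapping object down again so the
 * caller never sees a half-constructed bo_va. */
static int map_static_buffer(struct csa_va **out, unsigned long addr)
{
	struct csa_va *va = vm_bo_add();
	int r;

	if (!va)
		return -ENOMEM;

	r = vm_bo_map(va, addr);
	if (r) {
		vm_bo_del(va);
		*out = NULL;
		return r;
	}

	*out = va;
	return 0;
}
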
amdgpu_seq64.c
64 struct amdgpu_bo_va **bo_va) in amdgpu_seq64_map() argument
85 *bo_va = amdgpu_vm_bo_add(adev, vm, bo); in amdgpu_seq64_map()
86 if (!*bo_va) { in amdgpu_seq64_map()
92 r = amdgpu_vm_bo_map(adev, *bo_va, seq64_addr, 0, AMDGPU_VA_RESERVED_SEQ64_SIZE, in amdgpu_seq64_map()
96 amdgpu_vm_bo_del(adev, *bo_va); in amdgpu_seq64_map()
100 r = amdgpu_vm_bo_update(adev, *bo_va, false); in amdgpu_seq64_map()
103 amdgpu_vm_bo_del(adev, *bo_va); in amdgpu_seq64_map()
amdgpu_gem.c
165 struct amdgpu_bo_va *bo_va; in amdgpu_gem_object_open() local
182 bo_va = amdgpu_vm_bo_find(vm, abo); in amdgpu_gem_object_open()
183 if (!bo_va) in amdgpu_gem_object_open()
184 bo_va = amdgpu_vm_bo_add(adev, vm, abo); in amdgpu_gem_object_open()
186 ++bo_va->ref_count; in amdgpu_gem_object_open()
233 struct amdgpu_bo_va *bo_va; in amdgpu_gem_object_close() local
250 bo_va = amdgpu_vm_bo_find(vm, bo); in amdgpu_gem_object_close()
251 if (!bo_va || --bo_va->ref_count) in amdgpu_gem_object_close()
254 amdgpu_vm_bo_del(adev, bo_va); in amdgpu_gem_object_close()
644 struct amdgpu_bo_va *bo_va, in amdgpu_gem_va_update_vm() argument
[all …]
amdgpu_amdkfd_gpuvm.c
83 if (entry->bo_va->base.vm == avm) in kfd_mem_is_attached()
558 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmamap_userptr()
605 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmamap_dmabuf()
648 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmamap_sg_bo()
725 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmaunmap_userptr()
771 struct amdgpu_bo *bo = attachment->bo_va->base.bo; in kfd_mem_dmaunmap_sg_bo()
876 struct amdgpu_bo_va *bo_va; in kfd_mem_attach() local
964 bo_va = amdgpu_vm_bo_find(vm, bo[i]); in kfd_mem_attach()
965 if (!bo_va) in kfd_mem_attach()
966 bo_va = amdgpu_vm_bo_add(adev, vm, bo[i]); in kfd_mem_attach()
[all …]
amdgpu_umsch_mm.c
76 struct amdgpu_bo_va *bo_va; member
83 struct amdgpu_bo *bo, struct amdgpu_bo_va **bo_va, in map_ring_data() argument
105 *bo_va = amdgpu_vm_bo_add(adev, vm, bo); in map_ring_data()
106 if (!*bo_va) { in map_ring_data()
111 r = amdgpu_vm_bo_map(adev, *bo_va, addr, 0, size, in map_ring_data()
119 r = amdgpu_vm_bo_update(adev, *bo_va, false); in map_ring_data()
123 amdgpu_sync_fence(&sync, (*bo_va)->last_pt_update); in map_ring_data()
139 amdgpu_vm_bo_del(adev, *bo_va); in map_ring_data()
149 struct amdgpu_bo *bo, struct amdgpu_bo_va *bo_va, in unmap_ring_data() argument
169 r = amdgpu_vm_bo_unmap(adev, bo_va, addr); in unmap_ring_data()
[all …]
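
The amdgpu_umsch_mm.c map_ring_data() hits add one more step to the sequence: after amdgpu_vm_bo_map() and amdgpu_vm_bo_update(), the caller syncs on bo_va->last_pt_update so the ring never touches the new GPU address before the page-table write has actually landed. A toy, self-contained sketch of that ordering, with a plain flag standing in for the dma_fence and hypothetical helpers in place of the amdgpu calls:

/* Toy fence: in the driver this is the dma_fence signalled when the job that
 * wrote the page tables completes. */
struct toy_fence { int signalled; };

struct ring_va {
	unsigned long addr;
	struct toy_fence last_pt_update;  /* stands in for bo_va->last_pt_update */
};

/* Hypothetical stand-ins for amdgpu_vm_bo_map() and amdgpu_vm_bo_update(). */
static int vm_bo_map_range(struct ring_va *va, unsigned long addr)
{
	va->addr = addr;
	return 0;
}

static int vm_bo_update_ptes(struct ring_va *va)
{
	va->last_pt_update.signalled = 1;  /* pretend the PTE write completed */
	return 0;
}

static void toy_fence_wait(struct toy_fence *f)
{
	while (!f->signalled)
		;                          /* real code waits on the dma_fence */
}

/* map_ring_data()-style ordering: record the VA range, queue the page-table
 * update, then wait for the update fence before anything uses the mapping. */
static int map_ring_buffer(struct ring_va *va, unsigned long addr)
{
	int r = vm_bo_map_range(va, addr);

	if (r)
		return r;
	r = vm_bo_update_ptes(va);
	if (r)
		return r;
	toy_fence_wait(&va->last_pt_update);
	return 0;
}
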
amdgpu_csa.h
35 struct amdgpu_bo *bo, struct amdgpu_bo_va **bo_va,
38 struct amdgpu_bo *bo, struct amdgpu_bo_va *bo_va,
amdgpu_trace.h
246 TP_PROTO(struct amdgpu_bo_va *bo_va,
248 TP_ARGS(bo_va, mapping),
258 __entry->bo = bo_va ? bo_va->base.bo : NULL;
270 TP_PROTO(struct amdgpu_bo_va *bo_va,
272 TP_ARGS(bo_va, mapping),
282 __entry->bo = bo_va ? bo_va->base.bo : NULL;
amdgpu_cs.c
917 e->bo_va = amdgpu_vm_bo_find(vm, e->bo); in amdgpu_cs_parser_bos()
1100 struct amdgpu_bo_va *bo_va; in amdgpu_cs_vm_handling() local
1132 bo_va = fpriv->csa_va; in amdgpu_cs_vm_handling()
1133 BUG_ON(!bo_va); in amdgpu_cs_vm_handling()
1134 r = amdgpu_vm_bo_update(adev, bo_va, false); in amdgpu_cs_vm_handling()
1138 r = amdgpu_sync_fence(&p->sync, bo_va->last_pt_update); in amdgpu_cs_vm_handling()
1149 bo_va = e->bo_va; in amdgpu_cs_vm_handling()
1150 if (bo_va == NULL) in amdgpu_cs_vm_handling()
1153 r = amdgpu_vm_bo_update(adev, bo_va, false); in amdgpu_cs_vm_handling()
1157 r = amdgpu_sync_fence(&p->sync, bo_va->last_pt_update); in amdgpu_cs_vm_handling()
[all …]
amdgpu_vm.h
529 struct amdgpu_bo_va *bo_va,
545 struct amdgpu_bo_va *bo_va,
549 struct amdgpu_bo_va *bo_va,
553 struct amdgpu_bo_va *bo_va,
562 struct amdgpu_bo_va *bo_va);
amdgpu_mes.c
1296 struct amdgpu_bo_va *bo_va; in amdgpu_mes_ctx_map_meta_data() local
1317 bo_va = amdgpu_vm_bo_add(adev, vm, ctx_data->meta_data_obj); in amdgpu_mes_ctx_map_meta_data()
1318 if (!bo_va) { in amdgpu_mes_ctx_map_meta_data()
1324 r = amdgpu_vm_bo_map(adev, bo_va, ctx_data->meta_data_gpu_addr, 0, in amdgpu_mes_ctx_map_meta_data()
1334 r = amdgpu_vm_bo_update(adev, bo_va, false); in amdgpu_mes_ctx_map_meta_data()
1339 amdgpu_sync_fence(&sync, bo_va->last_pt_update); in amdgpu_mes_ctx_map_meta_data()
1352 ctx_data->meta_data_va = bo_va; in amdgpu_mes_ctx_map_meta_data()
1356 amdgpu_vm_bo_del(adev, bo_va); in amdgpu_mes_ctx_map_meta_data()
1367 struct amdgpu_bo_va *bo_va = ctx_data->meta_data_va; in amdgpu_mes_ctx_unmap_meta_data() local
1369 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_mes_ctx_unmap_meta_data()
[all …]
amdgpu_seq64.h
44 struct amdgpu_bo_va **bo_va);
amdgpu_bo_list.h
39 struct amdgpu_bo_va *bo_va; member
amdgpu_object.h
65 struct amdgpu_bo_va *bo_va; member
amdgpu_amdkfd.h
63 struct amdgpu_bo_va *bo_va; member
gmc_v9_0.c
1144 struct amdgpu_vm *vm = mapping->bo_va->base.vm; in gmc_v9_0_get_coherence_flags()
1174 if (mapping->bo_va->is_xgmi) in gmc_v9_0_get_coherence_flags()
1255 struct amdgpu_bo *bo = mapping->bo_va->base.bo; in gmc_v9_0_get_vm_pte()
vcn_v1_0.c
2028 if (!mapping || !mapping->bo_va || !mapping->bo_va->base.bo) in vcn_v1_0_validate_bo()
2031 bo = mapping->bo_va->base.bo; in vcn_v1_0_validate_bo()
gmc_v12_0.c
503 struct amdgpu_bo *bo = mapping->bo_va->base.bo; in gmc_v12_0_get_vm_pte()
gmc_v11_0.c
485 struct amdgpu_bo *bo = mapping->bo_va->base.bo; in gmc_v11_0_get_vm_pte()
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_queue.c
218 *pbo = amdgpu_bo_ref(mapping->bo_va->base.bo); in kfd_queue_buffer_get()
219 mapping->bo_va->queue_refcount++; in kfd_queue_buffer_get()
356 struct amdgpu_bo_va *bo_va; in kfd_queue_unref_bo_va() local
358 bo_va = amdgpu_vm_bo_find(vm, *bo); in kfd_queue_unref_bo_va()
359 if (bo_va && bo_va->queue_refcount) in kfd_queue_unref_bo_va()
360 bo_va->queue_refcount--; in kfd_queue_unref_bo_va()
