Lines matching refs: bo
138 dma_resv_assert_held(vm->root.bo->tbo.base.resv); in amdgpu_vm_assert_locked()
152 struct amdgpu_bo *bo = vm_bo->bo; in amdgpu_vm_bo_evicted() local
157 if (bo->tbo.type == ttm_bo_type_kernel) in amdgpu_vm_bo_evicted()
238 if (vm_bo->bo->parent) { in amdgpu_vm_bo_relocated()
282 struct amdgpu_bo *bo = vm_bo->bo; in amdgpu_vm_bo_reset_state_machine() local
285 if (!bo || bo->tbo.type != ttm_bo_type_kernel) in amdgpu_vm_bo_reset_state_machine()
287 else if (bo->parent) in amdgpu_vm_bo_reset_state_machine()
304 struct amdgpu_bo *bo = base->bo; in amdgpu_vm_update_shared() local
305 uint64_t size = amdgpu_bo_size(bo); in amdgpu_vm_update_shared()
306 uint32_t bo_memtype = amdgpu_bo_mem_stats_placement(bo); in amdgpu_vm_update_shared()
309 dma_resv_assert_held(bo->tbo.base.resv); in amdgpu_vm_update_shared()
311 shared = drm_gem_object_is_shared_for_memory_stats(&bo->tbo.base); in amdgpu_vm_update_shared()
332 void amdgpu_vm_bo_update_shared(struct amdgpu_bo *bo) in amdgpu_vm_bo_update_shared() argument
336 for (base = bo->vm_bo; base; base = base->next) in amdgpu_vm_bo_update_shared()
354 struct amdgpu_bo *bo = base->bo; in amdgpu_vm_update_stats_locked() local
355 int64_t size = sign * amdgpu_bo_size(bo); in amdgpu_vm_update_stats_locked()
356 uint32_t bo_memtype = amdgpu_bo_mem_stats_placement(bo); in amdgpu_vm_update_stats_locked()
373 if (bo->flags & AMDGPU_GEM_CREATE_DISCARDABLE) in amdgpu_vm_update_stats_locked()
375 if (!(bo->preferred_domains & amdgpu_mem_type_to_domain(res_memtype))) in amdgpu_vm_update_stats_locked()
410 struct amdgpu_vm *vm, struct amdgpu_bo *bo) in amdgpu_vm_bo_base_init() argument
413 base->bo = bo; in amdgpu_vm_bo_base_init()
417 if (!bo) in amdgpu_vm_bo_base_init()
419 base->next = bo->vm_bo; in amdgpu_vm_bo_base_init()
420 bo->vm_bo = base; in amdgpu_vm_bo_base_init()
423 base->shared = drm_gem_object_is_shared_for_memory_stats(&bo->tbo.base); in amdgpu_vm_bo_base_init()
424 amdgpu_vm_update_stats_locked(base, bo->tbo.resource, +1); in amdgpu_vm_bo_base_init()
427 if (!amdgpu_vm_is_bo_always_valid(vm, bo)) in amdgpu_vm_bo_base_init()
430 dma_resv_assert_held(vm->root.bo->tbo.base.resv); in amdgpu_vm_bo_base_init()
432 ttm_bo_set_bulk_move(&bo->tbo, &vm->lru_bulk_move); in amdgpu_vm_bo_base_init()
433 if (bo->tbo.type == ttm_bo_type_kernel && bo->parent) in amdgpu_vm_bo_base_init()
438 if (bo->preferred_domains & in amdgpu_vm_bo_base_init()
439 amdgpu_mem_type_to_domain(bo->tbo.resource->mem_type)) in amdgpu_vm_bo_base_init()
463 return drm_exec_prepare_obj(exec, &vm->root.bo->tbo.base, in amdgpu_vm_lock_pd()
480 struct amdgpu_bo *bo; in amdgpu_vm_lock_done_list() local
488 bo = bo_va->base.bo; in amdgpu_vm_lock_done_list()
489 if (bo) { in amdgpu_vm_lock_done_list()
490 amdgpu_bo_ref(bo); in amdgpu_vm_lock_done_list()
493 ret = drm_exec_prepare_obj(exec, &bo->tbo.base, 1); in amdgpu_vm_lock_done_list()
494 amdgpu_bo_unref(&bo); in amdgpu_vm_lock_done_list()
594 int (*validate)(void *p, struct amdgpu_bo *bo), in amdgpu_vm_validate() argument
599 struct amdgpu_bo *bo; in amdgpu_vm_validate() local
618 bo = bo_base->bo; in amdgpu_vm_validate()
620 r = validate(param, bo); in amdgpu_vm_validate()
624 if (bo->tbo.type != ttm_bo_type_kernel) { in amdgpu_vm_validate()
627 vm->update_funcs->map_table(to_amdgpu_bo_vm(bo)); in amdgpu_vm_validate()
638 bo = bo_base->bo; in amdgpu_vm_validate()
639 dma_resv_assert_held(bo->tbo.base.resv); in amdgpu_vm_validate()
641 r = validate(param, bo); in amdgpu_vm_validate()
914 struct amdgpu_bo *bo) in amdgpu_vm_bo_find() argument
918 for (base = bo->vm_bo; base; base = base->next) { in amdgpu_vm_bo_find()
1078 dma_resv_add_fence(vm->root.bo->tbo.base.resv, *fence, in amdgpu_vm_tlb_flush()
1159 amdgpu_bo_fence(vm->root.bo, vm->last_unlocked, true); in amdgpu_vm_update_range()
1264 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_update() local
1284 r = amdgpu_sync_resv(adev, &sync, vm->root.bo->tbo.base.resv, in amdgpu_vm_bo_update()
1288 if (bo) { in amdgpu_vm_bo_update()
1289 r = amdgpu_sync_kfd(&sync, bo->tbo.base.resv); in amdgpu_vm_bo_update()
1293 } else if (!bo) { in amdgpu_vm_bo_update()
1299 struct drm_gem_object *obj = &bo->tbo.base; in amdgpu_vm_bo_update()
1308 bo = gem_to_amdgpu_bo(gobj); in amdgpu_vm_bo_update()
1310 mem = bo->tbo.resource; in amdgpu_vm_bo_update()
1313 pages_addr = bo->tbo.ttm->dma_address; in amdgpu_vm_bo_update()
1316 r = amdgpu_sync_resv(adev, &sync, bo->tbo.base.resv, in amdgpu_vm_bo_update()
1322 if (bo) { in amdgpu_vm_bo_update()
1325 flags = amdgpu_ttm_tt_pte_flags(adev, bo->tbo.ttm, mem); in amdgpu_vm_bo_update()
1327 if (amdgpu_bo_encrypted(bo)) in amdgpu_vm_bo_update()
1330 bo_adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_vm_bo_update()
1332 uncached = (bo->flags & AMDGPU_GEM_CREATE_UNCACHED) != 0; in amdgpu_vm_bo_update()
1339 if (clear || amdgpu_vm_is_bo_always_valid(vm, bo)) in amdgpu_vm_bo_update()
1364 amdgpu_gmc_get_vm_pte(adev, vm, bo, mapping->flags, in amdgpu_vm_bo_update()
1382 if (amdgpu_vm_is_bo_always_valid(vm, bo)) { in amdgpu_vm_bo_update()
1383 if (bo->tbo.resource && in amdgpu_vm_bo_update()
1384 !(bo->preferred_domains & in amdgpu_vm_bo_update()
1385 amdgpu_mem_type_to_domain(bo->tbo.resource->mem_type))) in amdgpu_vm_bo_update()
1521 struct dma_resv *resv = vm->root.bo->tbo.base.resv; in amdgpu_vm_prt_fini()
1562 r = amdgpu_sync_resv(adev, &sync, vm->root.bo->tbo.base.resv, in amdgpu_vm_clear_freed()
1634 resv = bo_va->base.bo->tbo.base.resv; in amdgpu_vm_handle_moved()
1662 drm_gem_is_imported(&bo_va->base.bo->tbo.base) && in amdgpu_vm_handle_moved()
1663 (!bo_va->base.bo->tbo.resource || in amdgpu_vm_handle_moved()
1664 bo_va->base.bo->tbo.resource->mem_type == TTM_PL_SYSTEM)) in amdgpu_vm_handle_moved()
1736 struct amdgpu_bo *bo) in amdgpu_vm_bo_add() argument
1744 amdgpu_vm_bo_base_init(&bo_va->base, vm, bo); in amdgpu_vm_bo_add()
1751 if (!bo) in amdgpu_vm_bo_add()
1754 dma_resv_assert_held(bo->tbo.base.resv); in amdgpu_vm_bo_add()
1755 if (amdgpu_dmabuf_is_xgmi_accessible(adev, bo)) { in amdgpu_vm_bo_add()
1779 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_insert_map() local
1788 if (amdgpu_vm_is_bo_always_valid(vm, bo) && !bo_va->base.moved) in amdgpu_vm_bo_insert_map()
1796 struct amdgpu_bo *bo, in amdgpu_vm_verify_parameters() argument
1814 if (bo && offset + size > amdgpu_bo_size(bo)) in amdgpu_vm_verify_parameters()
1848 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_map() local
1853 r = amdgpu_vm_verify_parameters(adev, bo, saddr, offset, size); in amdgpu_vm_bo_map()
1864 "0x%010Lx-0x%010Lx\n", bo, saddr, eaddr, in amdgpu_vm_bo_map()
1907 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_replace_map() local
1911 r = amdgpu_vm_verify_parameters(adev, bo, saddr, offset, size); in amdgpu_vm_bo_replace_map()
2095 struct amdgpu_bo *bo = before->bo_va->base.bo; in amdgpu_vm_bo_clear_mappings() local
2101 if (amdgpu_vm_is_bo_always_valid(vm, bo) && in amdgpu_vm_bo_clear_mappings()
2110 struct amdgpu_bo *bo = after->bo_va->base.bo; in amdgpu_vm_bo_clear_mappings() local
2116 if (amdgpu_vm_is_bo_always_valid(vm, bo) && in amdgpu_vm_bo_clear_mappings()
2161 if (mapping->bo_va && mapping->bo_va->base.bo) { in amdgpu_vm_bo_trace_cs()
2162 struct amdgpu_bo *bo; in amdgpu_vm_bo_trace_cs() local
2164 bo = mapping->bo_va->base.bo; in amdgpu_vm_bo_trace_cs()
2165 if (dma_resv_locking_ctx(bo->tbo.base.resv) != in amdgpu_vm_bo_trace_cs()
2188 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_del() local
2192 dma_resv_assert_held(vm->root.bo->tbo.base.resv); in amdgpu_vm_bo_del()
2194 if (bo) { in amdgpu_vm_bo_del()
2195 dma_resv_assert_held(bo->tbo.base.resv); in amdgpu_vm_bo_del()
2196 if (amdgpu_vm_is_bo_always_valid(vm, bo)) in amdgpu_vm_bo_del()
2197 ttm_bo_set_bulk_move(&bo->tbo, NULL); in amdgpu_vm_bo_del()
2199 for (base = &bo_va->base.bo->vm_bo; *base; in amdgpu_vm_bo_del()
2204 amdgpu_vm_update_stats(*base, bo->tbo.resource, -1); in amdgpu_vm_bo_del()
2230 if (bo && bo_va->is_xgmi) in amdgpu_vm_bo_del()
2243 bool amdgpu_vm_evictable(struct amdgpu_bo *bo) in amdgpu_vm_evictable() argument
2245 struct amdgpu_vm_bo_base *bo_base = bo->vm_bo; in amdgpu_vm_evictable()
2252 if (!dma_resv_test_signaled(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP)) in amdgpu_vm_evictable()
2278 void amdgpu_vm_bo_invalidate(struct amdgpu_bo *bo, bool evicted) in amdgpu_vm_bo_invalidate() argument
2282 for (bo_base = bo->vm_bo; bo_base; bo_base = bo_base->next) { in amdgpu_vm_bo_invalidate()
2285 if (evicted && amdgpu_vm_is_bo_always_valid(vm, bo)) { in amdgpu_vm_bo_invalidate()
2294 if (bo->tbo.type == ttm_bo_type_kernel) in amdgpu_vm_bo_invalidate()
2296 else if (amdgpu_vm_is_bo_always_valid(vm, bo)) in amdgpu_vm_bo_invalidate()
2312 void amdgpu_vm_bo_move(struct amdgpu_bo *bo, struct ttm_resource *new_mem, in amdgpu_vm_bo_move() argument
2317 for (bo_base = bo->vm_bo; bo_base; bo_base = bo_base->next) { in amdgpu_vm_bo_move()
2321 amdgpu_vm_update_stats_locked(bo_base, bo->tbo.resource, -1); in amdgpu_vm_bo_move()
2326 amdgpu_vm_bo_invalidate(bo, evicted); in amdgpu_vm_bo_move()
2633 root_bo = amdgpu_bo_ref(&root->bo); in amdgpu_vm_init()
2662 amdgpu_bo_unreserve(vm->root.bo); in amdgpu_vm_init()
2674 amdgpu_bo_unreserve(vm->root.bo); in amdgpu_vm_init()
2709 r = amdgpu_bo_reserve(vm->root.bo, true); in amdgpu_vm_make_compute()
2724 r = amdgpu_bo_sync_wait(vm->root.bo, in amdgpu_vm_make_compute()
2743 amdgpu_bo_unreserve(vm->root.bo); in amdgpu_vm_make_compute()
2776 root = amdgpu_bo_ref(vm->root.bo); in amdgpu_vm_fini()
2804 WARN_ON(vm->root.bo); in amdgpu_vm_fini()
2960 root = amdgpu_bo_ref(vm->root.bo); in amdgpu_vm_handle_fault()
2985 if (vm && vm->root.bo != root) in amdgpu_vm_handle_fault()
3066 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
3068 total_idle += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
3075 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
3077 total_evicted += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
3084 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
3086 total_relocated += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
3093 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
3095 total_moved += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
3102 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
3104 total_invalidated += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
3111 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
3113 total_done += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
3199 bool amdgpu_vm_is_bo_always_valid(struct amdgpu_vm *vm, struct amdgpu_bo *bo) in amdgpu_vm_is_bo_always_valid() argument
3201 return bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv; in amdgpu_vm_is_bo_always_valid()
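
One pattern recurs throughout the listing above and is worth calling out: each amdgpu_bo keeps an intrusive singly linked chain of per-VM bo_base entries headed at bo->vm_bo. amdgpu_vm_bo_base_init() pushes onto that chain (lines 419-420), walkers iterate it (lines 336, 918, 2282, 2317), and amdgpu_vm_bo_del() unlinks from it with a pointer-to-pointer walk (line 2199). Below is a minimal, self-contained C sketch of that idiom; the struct layouts are simplified stand-ins for illustration, not the kernel's actual definitions.

	#include <assert.h>
	#include <stddef.h>

	struct vm_bo_base {
		struct bo *bo;            /* the buffer object this entry maps */
		struct vm_bo_base *next;  /* next base in the same BO's chain  */
	};

	struct bo {
		struct vm_bo_base *vm_bo; /* head of the per-BO chain of bases */
	};

	/* Mirrors the head insert in amdgpu_vm_bo_base_init() (lines 419-420). */
	static void vm_bo_base_init(struct vm_bo_base *base, struct bo *bo)
	{
		base->bo = bo;
		base->next = NULL;
		if (!bo)
			return;
		base->next = bo->vm_bo;
		bo->vm_bo = base;
	}

	/*
	 * Mirrors the unlink in amdgpu_vm_bo_del() (line 2199): walking a
	 * pointer-to-pointer means removing the head needs no special case.
	 * Assumes victim->bo is non-NULL, as the kernel code does there.
	 */
	static void vm_bo_base_del(struct vm_bo_base *victim)
	{
		struct vm_bo_base **base;

		for (base = &victim->bo->vm_bo; *base; base = &(*base)->next) {
			if (*base == victim) {
				*base = victim->next;
				break;
			}
		}
	}

	int main(void)
	{
		struct bo bo = { 0 };
		struct vm_bo_base a, b;

		vm_bo_base_init(&a, &bo);
		vm_bo_base_init(&b, &bo);            /* chain is now b -> a */
		assert(bo.vm_bo == &b && b.next == &a);

		vm_bo_base_del(&a);                  /* unlink the tail entry */
		assert(bo.vm_bo == &b && b.next == NULL);
		return 0;
	}

The same shape also explains the locking assertions scattered through the listing: entries on a per-BO chain can belong to different VMs, so the code asserts either the BO's own reservation lock (line 309) or the VM root BO's lock (lines 138, 430, 2192) before touching the chain or its stats, and amdgpu_vm_is_bo_always_valid() (line 3201) is simply the check that a BO shares the root BO's reservation object.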