Lines matching references to "bo" in drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c (Linux kernel). The leading number on each match is the source line in that file; "local" and "argument" mark how "bo" is declared in the enclosing function.

57 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_dma_buf_attach() local
58 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_attach()
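
The two matches in amdgpu_dma_buf_attach() are the locals the attach callback is built around. A minimal sketch of that shape, assuming the PCIe peer-to-peer capability check that sits between them in mainline; the pci_p2pdma_distance() call is assumed context, not part of the matches:

/* Sketch only; context is drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c
 * (needs amdgpu.h, amdgpu_dma_buf.h and <linux/pci-p2pdma.h>). */
static int amdgpu_dma_buf_attach(struct dma_buf *dmabuf,
				 struct dma_buf_attachment *attach)
{
	struct drm_gem_object *obj = dmabuf->priv;
	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);               /* line 57 */
	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); /* line 58 */

	/* Assumed: disable peer-to-peer when the importer cannot reach
	 * this device's memory across the PCIe fabric. */
	if (pci_p2pdma_distance(adev->pdev, attach->dev, false) < 0)
		attach->peer2peer = false;

	return 0;
}
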
76 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_dma_buf_pin() local
79 return amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT); in amdgpu_dma_buf_pin()
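
Line 79 is the whole story of the pin callback: a non-dynamic importer needs backing pages that never move, so the BO is pinned into GTT (GPU-visible system memory) rather than VRAM, which the importing device might not be able to reach. A sketch, assuming the usual dma_buf_attachment plumbing:

static int amdgpu_dma_buf_pin(struct dma_buf_attachment *attach)
{
	struct drm_gem_object *obj = attach->dmabuf->priv; /* assumed plumbing */
	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);      /* line 76 */

	/* Pin into GTT so the mapping stays valid for the importer. */
	return amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT);   /* line 79 */
}
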
92 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_dma_buf_unpin() local
94 amdgpu_bo_unpin(bo); in amdgpu_dma_buf_unpin()
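
The unpin callback is the symmetric release; once unpinned, TTM is again free to migrate or evict the BO. Sketch under the same plumbing assumption:

static void amdgpu_dma_buf_unpin(struct dma_buf_attachment *attach)
{
	struct drm_gem_object *obj = attach->dmabuf->priv;
	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); /* line 92 */

	amdgpu_bo_unpin(bo);                          /* line 94 */
}
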
115 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_dma_buf_map() local
116 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_map()
120 if (!bo->tbo.pin_count) { in amdgpu_dma_buf_map()
125 if (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM && in amdgpu_dma_buf_map()
127 bo->flags |= AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED; in amdgpu_dma_buf_map()
130 amdgpu_bo_placement_from_domain(bo, domains); in amdgpu_dma_buf_map()
131 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_dma_buf_map()
135 } else if (bo->tbo.resource->mem_type != TTM_PL_TT) { in amdgpu_dma_buf_map()
139 switch (bo->tbo.resource->mem_type) { in amdgpu_dma_buf_map()
142 bo->tbo.ttm->pages, in amdgpu_dma_buf_map()
143 bo->tbo.ttm->num_pages); in amdgpu_dma_buf_map()
153 r = amdgpu_vram_mgr_alloc_sgt(adev, bo->tbo.resource, 0, in amdgpu_dma_buf_map()
154 bo->tbo.base.size, attach->dev, in amdgpu_dma_buf_map()
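
The matches in amdgpu_dma_buf_map() outline a two-phase mapping: first make sure the BO sits somewhere mappable (validate an unpinned BO into GTT, or additionally into VRAM when the attachment is peer-to-peer capable, in which case CPU access must remain possible as a fallback), then build an sg_table from whichever placement won. A condensed sketch; the drm_prime_pages_to_sg()/dma_map_sgtable() calls, error handling, and exact domain bookkeeping are assumed context around the matched lines:

static struct sg_table *amdgpu_dma_buf_map(struct dma_buf_attachment *attach,
					   enum dma_data_direction dir)
{
	struct drm_gem_object *obj = attach->dmabuf->priv;
	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);               /* line 115 */
	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); /* line 116 */
	struct sg_table *sgt;
	long r;

	if (!bo->tbo.pin_count) {                                   /* line 120 */
		/* Migrate an unpinned BO somewhere mappable first. */
		struct ttm_operation_ctx ctx = { false, false };
		unsigned int domains = AMDGPU_GEM_DOMAIN_GTT;

		/* P2P-capable importers may map VRAM too, but the BO must
		 * then stay CPU-accessible as a fallback (lines 125-127). */
		if (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM &&
		    attach->peer2peer) {
			bo->flags |= AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED;
			domains |= AMDGPU_GEM_DOMAIN_VRAM;
		}
		amdgpu_bo_placement_from_domain(bo, domains);        /* line 130 */
		r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); /* line 131 */
		if (r)
			return ERR_PTR(r);
	} else if (bo->tbo.resource->mem_type != TTM_PL_TT) {       /* line 135 */
		return ERR_PTR(-EBUSY); /* pinned somewhere unmappable */
	}

	switch (bo->tbo.resource->mem_type) {                       /* line 139 */
	case TTM_PL_TT:
		/* GTT: wrap the TTM pages and DMA-map them (lines 142-143). */
		sgt = drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages,
					    bo->tbo.ttm->num_pages);
		if (IS_ERR(sgt))
			return sgt;
		if (dma_map_sgtable(attach->dev, sgt, dir,
				    DMA_ATTR_SKIP_CPU_SYNC)) {
			sg_free_table(sgt);
			kfree(sgt);
			return ERR_PTR(-EBUSY);
		}
		break;
	case TTM_PL_VRAM:
		/* VRAM: build an sgt of bus addresses (lines 153-154). */
		r = amdgpu_vram_mgr_alloc_sgt(adev, bo->tbo.resource, 0,
					      bo->tbo.base.size, attach->dev,
					      dir, &sgt);
		if (r)
			return ERR_PTR(r);
		break;
	default:
		return ERR_PTR(-EINVAL);
	}
	return sgt;
}
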
208 struct amdgpu_bo *bo = gem_to_amdgpu_bo(dma_buf->priv); in amdgpu_dma_buf_begin_cpu_access() local
209 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_begin_cpu_access()
211 u32 domain = amdgpu_display_supported_domains(adev, bo->flags); in amdgpu_dma_buf_begin_cpu_access()
220 ret = amdgpu_bo_reserve(bo, false); in amdgpu_dma_buf_begin_cpu_access()
224 if (!bo->tbo.pin_count && in amdgpu_dma_buf_begin_cpu_access()
225 (bo->allowed_domains & AMDGPU_GEM_DOMAIN_GTT)) { in amdgpu_dma_buf_begin_cpu_access()
226 amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT); in amdgpu_dma_buf_begin_cpu_access()
227 ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx); in amdgpu_dma_buf_begin_cpu_access()
230 amdgpu_bo_unreserve(bo); in amdgpu_dma_buf_begin_cpu_access()
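
begin_cpu_access migrates the BO into GTT before the CPU reads from it, so the pages end up in system memory; the matched lines show the reserve/validate/unreserve core. A sketch, with the direction/domain guard assumed from surrounding context:

static int amdgpu_dma_buf_begin_cpu_access(struct dma_buf *dma_buf,
					   enum dma_data_direction direction)
{
	struct amdgpu_bo *bo = gem_to_amdgpu_bo(dma_buf->priv);     /* line 208 */
	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); /* line 209 */
	struct ttm_operation_ctx ctx = { true, false };
	u32 domain = amdgpu_display_supported_domains(adev, bo->flags); /* 211 */
	bool reads = (direction == DMA_BIDIRECTIONAL ||
		      direction == DMA_FROM_DEVICE);
	int ret;

	/* Assumed guard: only act on CPU reads of GTT-capable BOs. */
	if (!reads || !(domain & AMDGPU_GEM_DOMAIN_GTT))
		return 0;

	ret = amdgpu_bo_reserve(bo, false);                         /* line 220 */
	if (unlikely(ret != 0))
		return ret;

	/* A pinned BO cannot move; otherwise migrate to GTT if allowed. */
	if (!bo->tbo.pin_count &&                                   /* line 224 */
	    (bo->allowed_domains & AMDGPU_GEM_DOMAIN_GTT)) {
		amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT);
		ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
	}

	amdgpu_bo_unreserve(bo);                                    /* line 230 */
	return ret;
}
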
260 struct amdgpu_bo *bo = gem_to_amdgpu_bo(gobj); in amdgpu_gem_prime_export() local
263 if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) || in amdgpu_gem_prime_export()
264 bo->flags & AMDGPU_GEM_CREATE_VM_ALWAYS_VALID) in amdgpu_gem_prime_export()
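
The export path refuses to share two kinds of BOs: userptr BOs, whose pages belong to a userspace mm (detected via amdgpu_ttm_tt_get_usermm()), and per-VM BOs created with AMDGPU_GEM_CREATE_VM_ALWAYS_VALID, which are tied to a single VM's reservation object. A sketch, assuming the standard drm_gem_prime_export() tail:

struct dma_buf *amdgpu_gem_prime_export(struct drm_gem_object *gobj,
					int flags)
{
	struct amdgpu_bo *bo = gem_to_amdgpu_bo(gobj); /* line 260 */
	struct dma_buf *buf;

	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) ||   /* lines 263-264 */
	    bo->flags & AMDGPU_GEM_CREATE_VM_ALWAYS_VALID)
		return ERR_PTR(-EPERM);

	/* Assumed tail: delegate to DRM PRIME and install amdgpu's ops. */
	buf = drm_gem_prime_export(gobj, flags);
	if (!IS_ERR(buf))
		buf->ops = &amdgpu_dmabuf_ops;

	return buf;
}
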
292 struct amdgpu_bo *bo; in amdgpu_dma_buf_create_obj() local
313 bo = gem_to_amdgpu_bo(gobj); in amdgpu_dma_buf_create_obj()
314 bo->allowed_domains = AMDGPU_GEM_DOMAIN_GTT; in amdgpu_dma_buf_create_obj()
315 bo->preferred_domains = AMDGPU_GEM_DOMAIN_GTT; in amdgpu_dma_buf_create_obj()
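
On import, lines 314-315 clamp both domain masks to GTT: an imported dma-buf is backed by the exporter's pages, so the local device must never try to migrate it into its own VRAM. A condensed sketch of the object creation around those lines; the amdgpu_gem_object_create() call and its exact parameters are assumed context:

static struct drm_gem_object *
amdgpu_dma_buf_create_obj(struct drm_device *dev, struct dma_buf *dma_buf)
{
	struct dma_resv *resv = dma_buf->resv;
	struct drm_gem_object *gobj;
	struct amdgpu_bo *bo;                          /* line 292 */
	int ret;

	dma_resv_lock(resv, NULL);
	/* Assumed: create an sg-type BO sharing the exporter's reservation. */
	ret = amdgpu_gem_object_create(drm_to_adev(dev), dma_buf->size,
				       PAGE_SIZE, AMDGPU_GEM_DOMAIN_CPU, 0,
				       ttm_bo_type_sg, resv, &gobj);
	if (ret)
		goto error;

	bo = gem_to_amdgpu_bo(gobj);                   /* line 313 */
	bo->allowed_domains = AMDGPU_GEM_DOMAIN_GTT;   /* line 314 */
	bo->preferred_domains = AMDGPU_GEM_DOMAIN_GTT; /* line 315 */

	dma_resv_unlock(resv);
	return gobj;

error:
	dma_resv_unlock(resv);
	return ERR_PTR(ret);
}
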
338 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_dma_buf_move_notify() local
339 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_dma_buf_move_notify()
348 amdgpu_vm_bo_invalidate(adev, bo, false); in amdgpu_dma_buf_move_notify()
349 if (!bo->tbo.resource || bo->tbo.resource->mem_type == TTM_PL_SYSTEM) in amdgpu_dma_buf_move_notify()
352 r = ttm_bo_validate(&bo->tbo, &placement, &ctx); in amdgpu_dma_buf_move_notify()
358 for (bo_base = bo->vm_bo; bo_base; bo_base = bo_base->next) { in amdgpu_dma_buf_move_notify()
360 struct dma_resv *resv = vm->root.bo->tbo.base.resv; in amdgpu_dma_buf_move_notify()
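
move_notify runs when the exporter moves the backing storage: line 348 invalidates any VM mappings of the import, line 349 bails out if the BO has no backing (or only system-memory backing) left, and line 352 validates against an empty placement, which tells TTM to kick the BO out of its current place. The loop at line 358 then walks every VM the BO is mapped into so page-table updates can be synchronized against each VM's reservation object. A sketch of that front half; the per-VM fence handling inside the loop is elided, and the importer_priv plumbing is assumed:

static void amdgpu_dma_buf_move_notify(struct dma_buf_attachment *attach)
{
	struct drm_gem_object *obj = attach->importer_priv; /* assumed */
	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);               /* line 338 */
	struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); /* line 339 */
	struct ttm_operation_ctx ctx = { false, false };
	struct ttm_placement placement = {}; /* empty placement == evict */
	struct amdgpu_vm_bo_base *bo_base;
	int r;

	amdgpu_vm_bo_invalidate(adev, bo, false);                   /* line 348 */
	if (!bo->tbo.resource || bo->tbo.resource->mem_type == TTM_PL_SYSTEM)
		return;                                             /* line 349 */

	r = ttm_bo_validate(&bo->tbo, &placement, &ctx);            /* line 352 */
	if (r) {
		DRM_ERROR("Failed to invalidate DMA-buf import (%d)\n", r);
		return;
	}

	for (bo_base = bo->vm_bo; bo_base; bo_base = bo_base->next) { /* 358 */
		struct amdgpu_vm *vm = bo_base->vm;
		struct dma_resv *resv = vm->root.bo->tbo.base.resv;   /* 360 */

		/* Elided: lock resv, mark the BO evicted in this VM and
		 * fence the page-table update. */
		(void)resv;
	}
}
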
454 struct amdgpu_bo *bo) in amdgpu_dmabuf_is_xgmi_accessible() argument
456 struct drm_gem_object *obj = &bo->tbo.base; in amdgpu_dmabuf_is_xgmi_accessible()
467 bo = gem_to_amdgpu_bo(gobj); in amdgpu_dmabuf_is_xgmi_accessible()
470 if (amdgpu_xgmi_same_hive(adev, amdgpu_ttm_adev(bo->tbo.bdev)) && in amdgpu_dmabuf_is_xgmi_accessible()
471 (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM)) in amdgpu_dmabuf_is_xgmi_accessible()
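
Finally, amdgpu_dmabuf_is_xgmi_accessible() decides whether a BO's VRAM can be reached over XGMI instead of PCIe: if the BO is itself an import, line 467 re-derives the exporter's BO first, and lines 470-471 then require that both devices share an XGMI hive and that the BO actually prefers VRAM. A sketch, assuming the usual ops check that rejects non-amdgpu exporters:

bool amdgpu_dmabuf_is_xgmi_accessible(struct amdgpu_device *adev,
				      struct amdgpu_bo *bo)      /* line 454 */
{
	struct drm_gem_object *obj = &bo->tbo.base;                /* line 456 */
	struct drm_gem_object *gobj;

	if (obj->import_attach) {
		struct dma_buf *dma_buf = obj->import_attach->dmabuf;

		if (dma_buf->ops != &amdgpu_dmabuf_ops)
			return false; /* no XGMI to non-amdgpu exporters */

		gobj = dma_buf->priv;
		bo = gem_to_amdgpu_bo(gobj);                       /* line 467 */
	}

	return amdgpu_xgmi_same_hive(adev, amdgpu_ttm_adev(bo->tbo.bdev)) &&
	       (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM); /* 470-471 */
}
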