Lines matching refs: bo_va in drivers/gpu/drm/radeon/radeon_vm.c (the leading number on each match is its line number in that file)
295 struct radeon_bo_va *bo_va; in radeon_vm_bo_find() local
297 list_for_each_entry(bo_va, &bo->va, bo_list) { in radeon_vm_bo_find()
298 if (bo_va->vm == vm) in radeon_vm_bo_find()
299 return bo_va; in radeon_vm_bo_find()
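
The four matches above are the whole of the lookup helper. A reconstruction from those fragments, assuming the usual signature in radeon_vm.c; only the signature and the trailing return NULL are not shown in the matches:

struct radeon_bo_va *radeon_vm_bo_find(struct radeon_vm *vm,
                                       struct radeon_bo *bo)
{
        struct radeon_bo_va *bo_va;

        /* A BO can be mapped into several VMs at once, so each BO keeps
         * a list of its mappings (bo->va); pick the one for this VM. */
        list_for_each_entry(bo_va, &bo->va, bo_list) {
                if (bo_va->vm == vm)
                        return bo_va;
        }
        return NULL;
}
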
322 struct radeon_bo_va *bo_va; in radeon_vm_bo_add() local
324 bo_va = kzalloc(sizeof(struct radeon_bo_va), GFP_KERNEL); in radeon_vm_bo_add()
325 if (bo_va == NULL) in radeon_vm_bo_add()
328 bo_va->vm = vm; in radeon_vm_bo_add()
329 bo_va->bo = bo; in radeon_vm_bo_add()
330 bo_va->it.start = 0; in radeon_vm_bo_add()
331 bo_va->it.last = 0; in radeon_vm_bo_add()
332 bo_va->flags = 0; in radeon_vm_bo_add()
333 bo_va->ref_count = 1; in radeon_vm_bo_add()
334 INIT_LIST_HEAD(&bo_va->bo_list); in radeon_vm_bo_add()
335 INIT_LIST_HEAD(&bo_va->vm_status); in radeon_vm_bo_add()
338 list_add_tail(&bo_va->bo_list, &bo->va); in radeon_vm_bo_add()
341 return bo_va; in radeon_vm_bo_add()
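
The radeon_vm_bo_add() matches show the allocation and default initialization of a mapping. A reconstruction; the gaps at lines 336-337 and 339-340 are assumed to be the vm->mutex lock/unlock pair around the list insertion, which the matches do not show:

struct radeon_bo_va *radeon_vm_bo_add(struct radeon_device *rdev,
                                      struct radeon_vm *vm,
                                      struct radeon_bo *bo)
{
        struct radeon_bo_va *bo_va;

        bo_va = kzalloc(sizeof(struct radeon_bo_va), GFP_KERNEL);
        if (bo_va == NULL)
                return NULL;

        /* A fresh bo_va has no virtual address yet: the interval
         * [it.start, it.last] stays 0/0 until radeon_vm_bo_set_addr(). */
        bo_va->vm = vm;
        bo_va->bo = bo;
        bo_va->it.start = 0;
        bo_va->it.last = 0;
        bo_va->flags = 0;
        bo_va->ref_count = 1;
        INIT_LIST_HEAD(&bo_va->bo_list);
        INIT_LIST_HEAD(&bo_va->vm_status);

        mutex_lock(&vm->mutex);                 /* assumed, not in the matches */
        list_add_tail(&bo_va->bo_list, &bo->va);
        mutex_unlock(&vm->mutex);               /* assumed, not in the matches */

        return bo_va;
}
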
446 struct radeon_bo_va *bo_va, in radeon_vm_bo_set_addr() argument
450 uint64_t size = radeon_bo_size(bo_va->bo); in radeon_vm_bo_set_addr()
451 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_set_addr()
482 if (it && it != &bo_va->it) { in radeon_vm_bo_set_addr()
487 "(bo %p 0x%010lx 0x%010lx)\n", bo_va->bo, in radeon_vm_bo_set_addr()
495 if (bo_va->it.start || bo_va->it.last) { in radeon_vm_bo_set_addr()
504 tmp->it.start = bo_va->it.start; in radeon_vm_bo_set_addr()
505 tmp->it.last = bo_va->it.last; in radeon_vm_bo_set_addr()
507 tmp->bo = radeon_bo_ref(bo_va->bo); in radeon_vm_bo_set_addr()
509 interval_tree_remove(&bo_va->it, &vm->va); in radeon_vm_bo_set_addr()
511 bo_va->it.start = 0; in radeon_vm_bo_set_addr()
512 bo_va->it.last = 0; in radeon_vm_bo_set_addr()
513 list_del_init(&bo_va->vm_status); in radeon_vm_bo_set_addr()
520 bo_va->it.start = soffset; in radeon_vm_bo_set_addr()
521 bo_va->it.last = eoffset; in radeon_vm_bo_set_addr()
522 list_add(&bo_va->vm_status, &vm->cleared); in radeon_vm_bo_set_addr()
524 interval_tree_insert(&bo_va->it, &vm->va); in radeon_vm_bo_set_addr()
527 bo_va->flags = flags; in radeon_vm_bo_set_addr()
537 radeon_bo_unreserve(bo_va->bo); in radeon_vm_bo_set_addr()
580 radeon_bo_unreserve(bo_va->bo); in radeon_vm_bo_set_addr()
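
The radeon_vm_bo_set_addr() matches trace the remap logic: reject overlaps, retire the old range, install the new one. Below is a condensed sketch of that control flow, not the verbatim source; the bounds checks, the page-table allocation between lines 527 and 580, all locking (vm->mutex, vm->status_lock, BO reservation) and most error paths are elided:

int radeon_vm_bo_set_addr(struct radeon_device *rdev,
                          struct radeon_bo_va *bo_va,
                          uint64_t soffset, uint32_t flags)
{
        uint64_t size = radeon_bo_size(bo_va->bo);
        struct radeon_vm *vm = bo_va->vm;
        struct interval_tree_node *it;
        struct radeon_bo_va *tmp;
        uint64_t eoffset;

        if (soffset) {
                /* make sure the object fits at this offset */
                eoffset = soffset + size - 1;
                /* ... bounds checks against the VM aperture (elided) ... */
        } else {
                eoffset = 0;
        }
        soffset /= RADEON_GPU_PAGE_SIZE;        /* the tree works in GPU pages */
        eoffset /= RADEON_GPU_PAGE_SIZE;

        /* Reject a range that overlaps any mapping other than our own. */
        it = interval_tree_iter_first(&vm->va, soffset, eoffset);
        if (it && it != &bo_va->it) {
                /* dev_err(): "... conflict with (bo %p 0x%010lx 0x%010lx)" */
                return -EINVAL;
        }

        if (bo_va->it.start || bo_va->it.last) {
                /* The old range still has live page-table entries. Clone it
                 * into a temporary bo_va and park that on vm->freed so
                 * radeon_vm_clear_freed() can invalidate the PTEs later. */
                tmp = kzalloc(sizeof(*tmp), GFP_KERNEL);
                if (!tmp)
                        return -ENOMEM;
                tmp->it.start = bo_va->it.start;
                tmp->it.last = bo_va->it.last;
                tmp->vm = vm;
                tmp->bo = radeon_bo_ref(bo_va->bo);

                interval_tree_remove(&bo_va->it, &vm->va);
                bo_va->it.start = 0;
                bo_va->it.last = 0;
                list_del_init(&bo_va->vm_status);
                list_add(&tmp->vm_status, &vm->freed);
        }

        if (soffset || eoffset) {
                bo_va->it.start = soffset;
                bo_va->it.last = eoffset;
                list_add(&bo_va->vm_status, &vm->cleared);
                interval_tree_insert(&bo_va->it, &vm->va);
        }
        bo_va->flags = flags;

        /* ... allocate any missing page-table BOs for the new range;
         * the early-exit path at line 537 also unreserves ... */

        radeon_bo_unreserve(bo_va->bo);
        return 0;
}

The clone-into-tmp step is the interesting design choice: the old PTEs are still live, so instead of tearing them down synchronously, the old range is handed to vm->freed and cleared later by radeon_vm_clear_freed().
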
910 struct radeon_bo_va *bo_va, in radeon_vm_bo_update() argument
913 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_update()
920 if (!bo_va->it.start) { in radeon_vm_bo_update()
922 bo_va->bo, vm); in radeon_vm_bo_update()
928 if (list_empty(&bo_va->vm_status)) { in radeon_vm_bo_update()
932 list_del_init(&bo_va->vm_status); in radeon_vm_bo_update()
934 list_del(&bo_va->vm_status); in radeon_vm_bo_update()
935 list_add(&bo_va->vm_status, &vm->cleared); in radeon_vm_bo_update()
939 bo_va->flags &= ~RADEON_VM_PAGE_VALID; in radeon_vm_bo_update()
940 bo_va->flags &= ~RADEON_VM_PAGE_SYSTEM; in radeon_vm_bo_update()
941 bo_va->flags &= ~RADEON_VM_PAGE_SNOOPED; in radeon_vm_bo_update()
942 if (bo_va->bo && radeon_ttm_tt_is_readonly(rdev, bo_va->bo->tbo.ttm)) in radeon_vm_bo_update()
943 bo_va->flags &= ~RADEON_VM_PAGE_WRITEABLE; in radeon_vm_bo_update()
948 bo_va->flags |= RADEON_VM_PAGE_VALID; in radeon_vm_bo_update()
951 bo_va->flags |= RADEON_VM_PAGE_SYSTEM; in radeon_vm_bo_update()
952 if (!(bo_va->bo->flags & (RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC))) in radeon_vm_bo_update()
953 bo_va->flags |= RADEON_VM_PAGE_SNOOPED; in radeon_vm_bo_update()
962 trace_radeon_vm_bo_update(bo_va); in radeon_vm_bo_update()
964 nptes = bo_va->it.last - bo_va->it.start + 1; in radeon_vm_bo_update()
973 flags = radeon_vm_page_flags(bo_va->flags); in radeon_vm_bo_update()
1002 if (!(bo_va->flags & RADEON_VM_PAGE_VALID)) { in radeon_vm_bo_update()
1009 r = radeon_vm_update_ptes(rdev, vm, &ib, bo_va->it.start, in radeon_vm_bo_update()
1010 bo_va->it.last + 1, addr, in radeon_vm_bo_update()
1011 radeon_vm_page_flags(bo_va->flags)); in radeon_vm_bo_update()
1026 radeon_vm_fence_pts(vm, bo_va->it.start, bo_va->it.last + 1, ib.fence); in radeon_vm_bo_update()
1027 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_bo_update()
1028 bo_va->last_pt_update = radeon_fence_ref(ib.fence); in radeon_vm_bo_update()
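
The radeon_vm_bo_update() matches cover the heart of the file: turning a mapping plus a TTM placement into page-table writes. A condensed sketch follows; the status-list handling at lines 928-935, the indirect-buffer sizing/allocation/scheduling and the sync logic around line 1002 are elided, and the type of mem is an assumption (it varies across kernel versions; struct ttm_resource is used here):

int radeon_vm_bo_update(struct radeon_device *rdev,
                        struct radeon_bo_va *bo_va,
                        struct ttm_resource *mem)
{
        struct radeon_vm *vm = bo_va->vm;
        struct radeon_ib ib;
        unsigned nptes;
        uint64_t addr;
        int r;

        if (!bo_va->it.start) {
                /* dev_err(): the BO has no mapping in this VM */
                return -EINVAL;
        }

        /* (elided) under vm->status_lock: with mem set, an empty vm_status
         * list means the mapping is already up to date; without mem the
         * mapping is being torn down and moves to vm->cleared. */

        /* Rebuild the page flags from scratch for the new placement. */
        bo_va->flags &= ~RADEON_VM_PAGE_VALID;
        bo_va->flags &= ~RADEON_VM_PAGE_SYSTEM;
        bo_va->flags &= ~RADEON_VM_PAGE_SNOOPED;
        if (bo_va->bo && radeon_ttm_tt_is_readonly(rdev, bo_va->bo->tbo.ttm))
                bo_va->flags &= ~RADEON_VM_PAGE_WRITEABLE;

        if (mem) {
                addr = (uint64_t)mem->start << PAGE_SHIFT;
                if (mem->mem_type != TTM_PL_SYSTEM)
                        bo_va->flags |= RADEON_VM_PAGE_VALID;
                if (mem->mem_type == TTM_PL_TT) {
                        bo_va->flags |= RADEON_VM_PAGE_SYSTEM;
                        if (!(bo_va->bo->flags &
                              (RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC)))
                                bo_va->flags |= RADEON_VM_PAGE_SNOOPED;
                } else {
                        /* VRAM: translate to the GPU's address space */
                        addr += rdev->vm_manager.vram_base_offset;
                }
        } else {
                addr = 0;       /* unmap: write cleared PTEs */
        }

        trace_radeon_vm_bo_update(bo_va);

        nptes = bo_va->it.last - bo_va->it.start + 1;

        /* (elided) size an indirect buffer from nptes, allocate it with
         * radeon_ib_get(), and sync against pending moves/unmaps. */

        r = radeon_vm_update_ptes(rdev, vm, &ib, bo_va->it.start,
                                  bo_va->it.last + 1, addr,
                                  radeon_vm_page_flags(bo_va->flags));
        if (r) {
                radeon_ib_free(rdev, &ib);
                return r;
        }

        /* (elided) schedule the ib. */

        /* Remember which fence protects these page tables. */
        radeon_vm_fence_pts(vm, bo_va->it.start, bo_va->it.last + 1, ib.fence);
        radeon_fence_unref(&bo_va->last_pt_update);
        bo_va->last_pt_update = radeon_fence_ref(ib.fence);
        return 0;
}
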
1048 struct radeon_bo_va *bo_va; in radeon_vm_clear_freed() local
1053 bo_va = list_first_entry(&vm->freed, in radeon_vm_clear_freed()
1057 r = radeon_vm_bo_update(rdev, bo_va, NULL); in radeon_vm_clear_freed()
1058 radeon_bo_unref(&bo_va->bo); in radeon_vm_clear_freed()
1059 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_clear_freed()
1061 list_del(&bo_va->vm_status); in radeon_vm_clear_freed()
1062 kfree(bo_va); in radeon_vm_clear_freed()
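
radeon_vm_clear_freed() consumes the vm->freed list that set_addr and rmv populate. A reconstruction; the spin_lock/spin_unlock choreography around vm->status_lock is inferred from the gaps between the matched lines:

int radeon_vm_clear_freed(struct radeon_device *rdev, struct radeon_vm *vm)
{
        struct radeon_bo_va *bo_va;
        int r = 0;

        spin_lock(&vm->status_lock);
        while (!list_empty(&vm->freed)) {
                bo_va = list_first_entry(&vm->freed,
                                         struct radeon_bo_va, vm_status);
                spin_unlock(&vm->status_lock);

                /* NULL mem means "unmap": write cleared PTEs. */
                r = radeon_vm_bo_update(rdev, bo_va, NULL);
                radeon_bo_unref(&bo_va->bo);    /* drop the ref taken at rmv/remap */
                radeon_fence_unref(&bo_va->last_pt_update);

                spin_lock(&vm->status_lock);
                list_del(&bo_va->vm_status);
                kfree(bo_va);
                if (r)
                        break;
        }
        spin_unlock(&vm->status_lock);
        return r;
}

The lock is dropped around radeon_vm_bo_update() because PTE updates can sleep; that is why the loop re-takes the lock and restarts with list_first_entry() on each pass instead of using list_for_each_entry_safe().
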
1086 struct radeon_bo_va *bo_va; in radeon_vm_clear_invalids() local
1091 bo_va = list_first_entry(&vm->invalidated, in radeon_vm_clear_invalids()
1095 r = radeon_vm_bo_update(rdev, bo_va, NULL); in radeon_vm_clear_invalids()
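
radeon_vm_clear_invalids() is the same drop-and-relock loop over vm->invalidated, except the bo_va entries survive the pass. A reconstruction on the same assumptions as above:

int radeon_vm_clear_invalids(struct radeon_device *rdev, struct radeon_vm *vm)
{
        struct radeon_bo_va *bo_va;
        int r;

        spin_lock(&vm->status_lock);
        while (!list_empty(&vm->invalidated)) {
                bo_va = list_first_entry(&vm->invalidated,
                                         struct radeon_bo_va, vm_status);
                spin_unlock(&vm->status_lock);

                r = radeon_vm_bo_update(rdev, bo_va, NULL);
                if (r)
                        return r;

                spin_lock(&vm->status_lock);
        }
        spin_unlock(&vm->status_lock);
        return 0;
}

The loop terminates because radeon_vm_bo_update(..., NULL) moves each entry from vm->invalidated to vm->cleared (lines 934-935 above).
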
1117 struct radeon_bo_va *bo_va) in radeon_vm_bo_rmv() argument
1119 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_rmv()
1121 list_del(&bo_va->bo_list); in radeon_vm_bo_rmv()
1124 if (bo_va->it.start || bo_va->it.last) in radeon_vm_bo_rmv()
1125 interval_tree_remove(&bo_va->it, &vm->va); in radeon_vm_bo_rmv()
1128 list_del(&bo_va->vm_status); in radeon_vm_bo_rmv()
1129 if (bo_va->it.start || bo_va->it.last) { in radeon_vm_bo_rmv()
1130 bo_va->bo = radeon_bo_ref(bo_va->bo); in radeon_vm_bo_rmv()
1131 list_add(&bo_va->vm_status, &vm->freed); in radeon_vm_bo_rmv()
1133 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_bo_rmv()
1134 kfree(bo_va); in radeon_vm_bo_rmv()
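
The radeon_vm_bo_rmv() matches show the two fates of a mapping being removed. A reconstruction; the mutex and spinlock placement is an assumption, taken to match the vm->mutex and vm->status_lock usage elsewhere in the file:

void radeon_vm_bo_rmv(struct radeon_device *rdev,
                      struct radeon_bo_va *bo_va)
{
        struct radeon_vm *vm = bo_va->vm;

        list_del(&bo_va->bo_list);

        mutex_lock(&vm->mutex);                 /* assumed */
        if (bo_va->it.start || bo_va->it.last)
                interval_tree_remove(&bo_va->it, &vm->va);

        spin_lock(&vm->status_lock);            /* assumed */
        list_del(&bo_va->vm_status);
        if (bo_va->it.start || bo_va->it.last) {
                /* PTEs are still live: keep the bo_va (plus a BO reference)
                 * on vm->freed so radeon_vm_clear_freed() can clear them. */
                bo_va->bo = radeon_bo_ref(bo_va->bo);
                list_add(&bo_va->vm_status, &vm->freed);
        } else {
                /* Never mapped: nothing in the page tables, free it now. */
                radeon_fence_unref(&bo_va->last_pt_update);
                kfree(bo_va);
        }
        spin_unlock(&vm->status_lock);          /* assumed */
        mutex_unlock(&vm->mutex);               /* assumed */
}
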
1152 struct radeon_bo_va *bo_va; in radeon_vm_bo_invalidate() local
1154 list_for_each_entry(bo_va, &bo->va, bo_list) { in radeon_vm_bo_invalidate()
1155 spin_lock(&bo_va->vm->status_lock); in radeon_vm_bo_invalidate()
1156 if (list_empty(&bo_va->vm_status) && in radeon_vm_bo_invalidate()
1157 (bo_va->it.start || bo_va->it.last)) in radeon_vm_bo_invalidate()
1158 list_add(&bo_va->vm_status, &bo_va->vm->invalidated); in radeon_vm_bo_invalidate()
1159 spin_unlock(&bo_va->vm->status_lock); in radeon_vm_bo_invalidate()
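
radeon_vm_bo_invalidate() is the TTM move-notification path: when a BO changes placement, every VM that maps it must rewrite its PTEs. The matches cover the entire loop body; only the signature and braces are filled in here:

void radeon_vm_bo_invalidate(struct radeon_device *rdev,
                             struct radeon_bo *bo)
{
        struct radeon_bo_va *bo_va;

        list_for_each_entry(bo_va, &bo->va, bo_list) {
                spin_lock(&bo_va->vm->status_lock);
                /* Queue the mapping for revalidation unless it is already
                 * on a status list or was never given an address. */
                if (list_empty(&bo_va->vm_status) &&
                    (bo_va->it.start || bo_va->it.last))
                        list_add(&bo_va->vm_status, &bo_va->vm->invalidated);
                spin_unlock(&bo_va->vm->status_lock);
        }
}
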
1233 struct radeon_bo_va *bo_va, *tmp; in radeon_vm_fini() local
1239 rbtree_postorder_for_each_entry_safe(bo_va, tmp, in radeon_vm_fini()
1241 interval_tree_remove(&bo_va->it, &vm->va); in radeon_vm_fini()
1242 r = radeon_bo_reserve(bo_va->bo, false); in radeon_vm_fini()
1244 list_del_init(&bo_va->bo_list); in radeon_vm_fini()
1245 radeon_bo_unreserve(bo_va->bo); in radeon_vm_fini()
1246 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_fini()
1247 kfree(bo_va); in radeon_vm_fini()
1250 list_for_each_entry_safe(bo_va, tmp, &vm->freed, vm_status) { in radeon_vm_fini()
1251 radeon_bo_unref(&bo_va->bo); in radeon_vm_fini()
1252 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_fini()
1253 kfree(bo_va); in radeon_vm_fini()
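
The final group is teardown. A condensed reconstruction; the rb_root_cached form of vm->va, the "still active bo" warning and the trailing page-table and fence cleanup are assumptions based on the usual shape of this function, not shown in the matches:

void radeon_vm_fini(struct radeon_device *rdev, struct radeon_vm *vm)
{
        struct radeon_bo_va *bo_va, *tmp;
        int r;

        /* Mappings left in the interval tree are a caller bug: warn
         * (assumed), then unlink and free them anyway. */
        rbtree_postorder_for_each_entry_safe(bo_va, tmp,
                                             &vm->va.rb_root, it.rb) {
                interval_tree_remove(&bo_va->it, &vm->va);
                r = radeon_bo_reserve(bo_va->bo, false);
                if (!r) {
                        list_del_init(&bo_va->bo_list);
                        radeon_bo_unreserve(bo_va->bo);
                        radeon_fence_unref(&bo_va->last_pt_update);
                        kfree(bo_va);
                }
        }

        /* Entries on vm->freed hold an extra BO reference (taken in
         * radeon_vm_bo_rmv/set_addr); drop it before freeing. */
        list_for_each_entry_safe(bo_va, tmp, &vm->freed, vm_status) {
                radeon_bo_unref(&bo_va->bo);
                radeon_fence_unref(&bo_va->last_pt_update);
                kfree(bo_va);
        }

        /* ... free the page tables, the page directory and the
         * per-ring fences (elided) ... */
}
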