Lines Matching refs:vbo
151 * @vbo: The buffer object to search for.
158 struct vmw_bo *vbo)
167 unsigned long key = (unsigned long) vbo;
179 if (entry->base.bo == &vbo->tbo) {
242 * @vbo: The buffer object.
247 struct vmw_bo *vbo)
251 bo_node = vmw_validation_find_bo_dup(ctx, vbo);
260 bo_node->hash.key = (unsigned long) vbo;
265 vmw_bo_reference(vbo);
266 val_buf->bo = &vbo->tbo;
376 * @vbo: The new backup buffer object MOB. This buffer object needs to have
382 struct vmw_bo *vbo,
393 val->new_guest_memory_bo = vbo;
423 struct vmw_bo *vbo = res->guest_memory_bo;
425 vmw_bo_placement_set(vbo,
428 ret = vmw_validation_add_bo(ctx, vbo);
497 struct vmw_bo *vbo = to_vmw_bo(&bo->base);
504 if (atomic_read(&vbo->cpu_writers))
507 if (vbo->tbo.pin_count > 0)
510 ret = ttm_bo_validate(bo, &vbo->placement, &ctx);
520 return ttm_bo_validate(bo, &vbo->placement, &ctx);
538 struct vmw_bo *vbo = to_vmw_bo(&entry->base.bo->base);
554 ret = vmw_bo_dirty_add(vbo);
563 if (vbo->dirty)
564 vmw_bo_dirty_scan(vbo);
600 struct vmw_bo *vbo = res->guest_memory_bo;
602 vmw_bo_placement_set(vbo, res->func->domain,
604 ret = vmw_validation_add_bo(ctx, vbo);
838 struct vmw_bo *vbo = to_vmw_bo(&entry->base.bo->base);
841 vmw_bo_dirty_release(vbo);