Lines matching references to the identifier va:
70 } va;
174 u64 addr = reg->va.addr;
175 u64 range = reg->va.range;
183 u64 addr = uvma->va.va.addr;
184 u64 range = uvma->va.va.range;
193 u64 addr = uvma->va.va.addr;
194 u64 offset = uvma->va.gem.offset;
195 u64 range = uvma->va.va.range;
204 u64 addr = uvma->va.va.addr;
205 u64 range = uvma->va.va.range;
208 if (drm_gpuva_invalidated(&uvma->va))
233 drm_gem_object_get(uvma->va.gem.obj);
239 drm_gem_object_put(uvma->va.gem.obj);
279 u64 addr = reg->va.addr;
280 u64 range = reg->va.range;
308 reg->va.addr = addr;
309 reg->va.range = range;
322 MA_STATE(mas, &uvmm->region_mt, reg->va.addr, 0);
377 if (reg->va.addr != addr ||
378 reg->va.range != range)
390 reg->va.addr,
391 reg->va.range);
398 u64 addr = reg->va.addr;
399 u64 range = reg->va.range;
441 struct drm_gpuva *va = &uvma->va;
443 drm_gpuva_remove(va);
448 op_unmap_prepare_unwind(struct drm_gpuva *va)
450 drm_gpuva_insert(va->vm, va);
472 struct drm_gpuva *va = r->unmap->va;
480 op_unmap_prepare_unwind(va);
484 op_unmap_prepare_unwind(op->unmap.va);
509 struct drm_gpuva *va = r->unmap->va;
510 u64 ustart = va->va.addr;
511 u64 urange = va->va.range;
527 struct drm_gpuva *va = u->va;
528 u64 ustart = va->va.addr;
529 u64 urange = va->va.range;
596 drm_gpuva_map(&uvmm->base, &uvma->va, op);
647 struct drm_gpuva *va = r->unmap->va;
649 .kind = uvma_from_va(va)->kind,
650 .region = uvma_from_va(va)->region,
652 u64 ustart = va->va.addr;
653 u64 urange = va->va.range;
688 struct drm_gpuva *va = u->va;
689 u64 ustart = va->va.addr;
690 u64 urange = va->va.range;
709 op_unmap_prepare_unwind(va);
769 return op->remap.unmap->va->gem.obj;
771 return op->unmap.va->gem.obj;
781 struct nouveau_bo *nvbo = nouveau_gem_object(uvma->va.gem.obj);
789 struct drm_gpuva *va = u->va;
790 struct nouveau_uvma *uvma = uvma_from_va(va);
801 struct nouveau_uvma *uvma = uvma_from_va(u->va);
804 if (!drm_gpuva_invalidated(u->va))
813 struct nouveau_uvma *uvma = uvma_from_va(u->va);
814 u64 addr = uvma->va.va.addr;
815 u64 end = uvma->va.va.addr + uvma->va.va.range;
818 addr = r->prev->va.addr + r->prev->va.range;
821 end = r->next->va.addr;
883 struct drm_gpuva *va = r->unmap->va;
884 struct nouveau_uvma *uvma = uvma_from_va(va);
887 u64 addr = va->va.addr;
888 u64 end = addr + va->va.range;
891 addr = p->va.addr + p->va.range;
894 end = n->va.addr;
905 struct drm_gpuva *va = u->va;
906 struct nouveau_uvma *uvma = uvma_from_va(va);
1006 if (op->va.range > (obj->size - op->gem.offset))
1010 return nouveau_uvmm_validate_range(uvmm, op->va.addr, op->va.range);
1028 u64 op_addr = op->va.addr;
1029 u64 op_end = op_addr + op->va.range;
1076 reg_addr = reg->va.addr;
1077 reg_end = reg_addr + reg->va.range;
1096 u64 op_addr = op->va.addr;
1097 u64 op_range = op->va.range;
1130 drm_gpuva_link(&new->map->va, vm_bo);
1133 struct drm_gpuva *va = op->remap.unmap->va;
1136 drm_gpuva_link(&new->prev->va, va->vm_bo);
1138 drm_gpuva_link(&new->next->va, va->vm_bo);
1139 drm_gpuva_unlink(va);
1143 drm_gpuva_unlink(op->unmap.va);
1243 op->va.addr,
1244 op->va.range);
1250 op->reg = nouveau_uvma_region_find(uvmm, op->va.addr,
1251 op->va.range);
1258 op->va.addr,
1259 op->va.range);
1281 op->va.addr,
1282 op->va.range);
1284 u64 reg_addr = reg->va.addr;
1285 u64 reg_end = reg_addr + reg->va.range;
1286 u64 op_addr = op->va.addr;
1287 u64 op_end = op_addr + op->va.range;
1304 op->va.addr,
1305 op->va.range,
1315 op->va.addr,
1316 op->va.range,
1328 op->va.addr,
1329 op->va.range);
1401 nouveau_uvma_region_destroy(uvmm, op->va.addr,
1402 op->va.range);
1412 op->va.addr,
1413 op->va.range);
1569 op->va.addr = uop->addr;
1570 op->va.range = uop->range;
1760 struct drm_gpuva *va;
1765 drm_gpuvm_bo_for_each_va(va, vm_bo) {
1766 struct nouveau_uvma *uvma = uvma_from_va(va);
1769 drm_gpuva_invalidate(va, false);
1779 struct drm_gpuva *va;
1784 drm_gpuvm_bo_for_each_va(va, vm_bo) {
1785 struct nouveau_uvma *uvma = uvma_from_va(va);
1788 drm_gpuva_invalidate(va, true);
1896 struct drm_gpuva *va, *next;
1899 drm_gpuvm_for_each_va_safe(va, next, &uvmm->base) {
1900 struct nouveau_uvma *uvma = uvma_from_va(va);
1901 struct drm_gem_object *obj = va->gem.obj;
1903 if (unlikely(va == &uvmm->base.kernel_alloc_node))
1906 drm_gpuva_remove(va);
1909 drm_gpuva_unlink(va);
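
The recurring patterns above (uvma->va.va.addr, uvma_from_va(va), op->remap.unmap->va->gem.obj) all fall out of struct embedding: nouveau wraps the GPUVA manager's struct drm_gpuva in its own struct nouveau_uvma and converts back with container_of(). Below is a minimal sketch of that layout; the field selection is simplified for illustration and is not the complete kernel definition.

#include <linux/container_of.h>
#include <linux/types.h>

/* Simplified: the real struct drm_gpuva also carries flags, an
 * rb-tree node, and the vm_bo link seen at some call sites above. */
struct drm_gpuva {
	struct drm_gpuvm *vm;
	struct {
		u64 addr;	/* read as uvma->va.va.addr above  */
		u64 range;	/* read as uvma->va.va.range above */
	} va;
	struct {
		u64 offset;			/* uvma->va.gem.offset */
		struct drm_gem_object *obj;	/* uvma->va.gem.obj    */
	} gem;
};

struct nouveau_uvma {
	struct drm_gpuva va;	/* embedded, hence uvma->va.va.addr */
	/* driver-private state (->kind, ->region) follows here */
};

/* The upcast behind the uvma_from_va() calls in the listing:
 * a plain container_of() from the embedded drm_gpuva. */
#define uvma_from_va(x) container_of((x), struct nouveau_uvma, va)

The doubled "va.va" spelling is therefore not a typo: the outer va is the drm_gpuva member embedded in nouveau_uvma, while the inner va is the addr/range pair nested inside struct drm_gpuva itself.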