Lines Matching full:remap

1921 vma = gpuva_to_vma(op->remap.unmap->va);
1922 vm_dbg(&xe->drm, "REMAP:UNMAP: addr=0x%016llx, range=0x%016llx, keep=%d",
1924 op->remap.unmap->keep ? 1 : 0);
1925 if (op->remap.prev)
1927 "REMAP:PREV: addr=0x%016llx, range=0x%016llx",
1928 (ULL)op->remap.prev->va.addr,
1929 (ULL)op->remap.prev->va.range);
1930 if (op->remap.next)
1932 "REMAP:NEXT: addr=0x%016llx, range=0x%016llx",
1933 (ULL)op->remap.next->va.addr,
1934 (ULL)op->remap.next->va.range);
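
These vm_dbg() calls log the geometry of a GPUVA remap: the span being unmapped out of the old VMA, plus the optional prev and next remainders that survive below and above the new mapping. As a rough, self-contained illustration of that split (hypothetical names, not driver code), computing the three spans from an old mapping and an overlapping bind request looks roughly like this:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for a GPU VA span; not a driver type. */
struct span {
	uint64_t addr;
	uint64_t range;		/* size in bytes */
};

/*
 * Split @old around an overlapping request @req: whatever lies below the
 * request survives as "prev", whatever lies above it survives as "next",
 * and @old itself is reported as the span to unmap.
 */
static void split_around(struct span old, struct span req)
{
	uint64_t old_end = old.addr + old.range;
	uint64_t req_end = req.addr + req.range;

	printf("REMAP:UNMAP: addr=0x%016llx, range=0x%016llx\n",
	       (unsigned long long)old.addr, (unsigned long long)old.range);
	if (req.addr > old.addr)
		printf("REMAP:PREV: addr=0x%016llx, range=0x%016llx\n",
		       (unsigned long long)old.addr,
		       (unsigned long long)(req.addr - old.addr));
	if (req_end < old_end)
		printf("REMAP:NEXT: addr=0x%016llx, range=0x%016llx\n",
		       (unsigned long long)req_end,
		       (unsigned long long)(old_end - req_end));
}

int main(void)
{
	/* Old 1 MiB VMA at 1 MiB; a 256 KiB bind punched into its middle. */
	split_around((struct span){ 0x100000, 0x100000 },
		     (struct span){ 0x140000, 0x40000 });
	return 0;
}
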
2137 gpuva_to_vma(op->base.remap.unmap->va)->tile_present;
2139 prep_vma_destroy(vm, gpuva_to_vma(op->base.remap.unmap->va),
2143 if (op->remap.prev) {
2144 err |= xe_vm_insert_vma(vm, op->remap.prev);
2147 if (!err && op->remap.skip_prev) {
2148 op->remap.prev->tile_present =
2150 op->remap.prev = NULL;
2153 if (op->remap.next) {
2154 err |= xe_vm_insert_vma(vm, op->remap.next);
2157 if (!err && op->remap.skip_next) {
2158 op->remap.next->tile_present =
2160 op->remap.next = NULL;
2166 op->base.remap.unmap->va->va.addr = op->remap.start;
2167 op->base.remap.unmap->va->va.range = op->remap.range;
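
In this commit path the old VMA has just been removed from the VM and the new prev/next pieces inserted. A piece flagged skip_prev/skip_next keeps its existing page-table entries, so it inherits the old VMA's tile_present mask and is cleared from the op, and the old VMA's addr/range are rewritten from op->remap.start/range so only the span that still needs a GPU unbind remains. A toy sketch of that bookkeeping, with invented types standing in for the driver's:

#include <stdbool.h>
#include <stdint.h>

/* Hypothetical, simplified stand-ins for the driver structures. */
struct toy_vma {
	uint64_t addr;
	uint64_t range;
	uint8_t tile_present;	/* bitmask of tiles with live page tables */
};

struct toy_remap {
	struct toy_vma *prev, *next;	/* new pieces, may be NULL */
	bool skip_prev, skip_next;	/* piece keeps its existing PTEs */
	uint64_t start, range;		/* span that still needs a GPU unbind */
};

/* Commit-time bookkeeping, after the old VMA was removed from the VM tree. */
static void toy_commit_remap(struct toy_vma *old, struct toy_remap *op)
{
	if (op->prev && op->skip_prev) {
		op->prev->tile_present = old->tile_present;
		op->prev = NULL;	/* nothing left to bind for this piece */
	}
	if (op->next && op->skip_next) {
		op->next->tile_present = old->tile_present;
		op->next = NULL;
	}

	/* Narrow the old VMA to the span that actually gets unbound. */
	old->addr = op->start;
	old->range = op->range;
}

int main(void)
{
	struct toy_vma old = { .addr = 0x100000, .range = 0x100000, .tile_present = 0x3 };
	struct toy_vma prev = { .addr = 0x100000, .range = 0x40000 };
	struct toy_remap op = {
		.prev = &prev, .skip_prev = true,
		.start = 0x140000, .range = 0xc0000,
	};

	toy_commit_remap(&old, &op);
	/* prev.tile_present is now 0x3; old covers only [0x140000, 0x200000). */
	return 0;
}
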
2232 gpuva_to_vma(op->base.remap.unmap->va);
2234 op->remap.start = xe_vma_start(old);
2235 op->remap.range = xe_vma_size(old);
2237 if (op->base.remap.prev) {
2238 flags |= op->base.remap.unmap->va->flags &
2241 flags |= op->base.remap.unmap->va->flags &
2244 flags |= op->base.remap.unmap->va->flags &
2248 vma = new_vma(vm, op->base.remap.prev,
2253 op->remap.prev = vma;
2259 op->remap.skip_prev = !xe_vma_is_userptr(old) &&
2262 if (op->remap.skip_prev) {
2264 op->remap.range -=
2267 op->remap.start = xe_vma_end(vma);
2268 vm_dbg(&xe->drm, "REMAP:SKIP_PREV: addr=0x%016llx, range=0x%016llx",
2269 (ULL)op->remap.start,
2270 (ULL)op->remap.range);
2276 if (op->base.remap.next) {
2277 flags |= op->base.remap.unmap->va->flags &
2280 flags |= op->base.remap.unmap->va->flags &
2283 flags |= op->base.remap.unmap->va->flags &
2287 vma = new_vma(vm, op->base.remap.next,
2292 op->remap.next = vma;
2298 op->remap.skip_next = !xe_vma_is_userptr(old) &&
2301 if (op->remap.skip_next) {
2303 op->remap.range -=
2306 vm_dbg(&xe->drm, "REMAP:SKIP_NEXT: addr=0x%016llx, range=0x%016llx",
2307 (ULL)op->remap.start,
2308 (ULL)op->remap.range);
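
The parse step decides whether a remainder piece may skip rebinding: per the driver's own comment a userptr VMA always rebinds because it builds a new SG mapping, and otherwise the piece is skippable when the split boundary is aligned to the old VMA's maximum PTE size, so the cut does not land inside a larger page-table entry. When a piece is skipped, op->remap.start/range shrink so the later unbind leaves it alone. A minimal sketch of that check and the range arithmetic, with names invented for the example:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ALIGNED(x, a)	(((x) & ((a) - 1)) == 0)	/* a must be a power of two */

struct unbind_span {
	uint64_t start;
	uint64_t range;
};

/*
 * Shrink the pending unbind span when the "prev" piece (span->start..prev_end)
 * keeps its existing PTEs: allowed only if prev_end is aligned to the old
 * mapping's largest PTE size and the mapping is not a userptr.
 */
static bool maybe_skip_prev(struct unbind_span *span, uint64_t prev_end,
			    uint64_t max_pte_size, bool is_userptr)
{
	bool skip = !is_userptr && ALIGNED(prev_end, max_pte_size);

	if (skip) {
		span->range -= prev_end - span->start;
		span->start = prev_end;
	}
	return skip;
}

int main(void)
{
	/* Old VMA: 2 MiB at 0x200000, mapped with 64 KiB PTEs. */
	struct unbind_span span = { .start = 0x200000, .range = 0x200000 };

	if (maybe_skip_prev(&span, 0x280000, 0x10000, false))
		printf("SKIP_PREV: addr=0x%016llx, range=0x%016llx\n",
		       (unsigned long long)span.start,
		       (unsigned long long)span.range);
	return 0;
}
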
2370 struct xe_vma *vma = gpuva_to_vma(op->base.remap.unmap->va);
2372 if (op->remap.prev) {
2373 prep_vma_destroy(vm, op->remap.prev, prev_post_commit);
2374 xe_vma_destroy_unlocked(op->remap.prev);
2376 if (op->remap.next) {
2377 prep_vma_destroy(vm, op->remap.next, next_post_commit);
2378 xe_vma_destroy_unlocked(op->remap.next);
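
This is the unwind path: if the bind series fails after the remap was parsed or partially committed, the speculatively created prev and next VMAs are removed and destroyed again. A bare-bones sketch of that rollback shape, using placeholder helpers rather than the driver's:

#include <stdbool.h>
#include <stdlib.h>

/* Placeholder for a piece created while parsing a remap; not a driver type. */
struct piece {
	void *payload;
	bool inserted;	/* piece was already linked into the VM tree */
};

/* Hypothetical stand-ins for prep_vma_destroy()/xe_vma_destroy_unlocked(). */
static void piece_unlink(struct piece *p)
{
	p->inserted = false;
}

static void piece_free(struct piece *p)
{
	free(p->payload);
	p->payload = NULL;
}

/* Roll back whatever the remap created; each piece unwinds independently. */
static void unwind_remap(struct piece *prev, struct piece *next)
{
	if (prev) {
		if (prev->inserted)
			piece_unlink(prev);
		piece_free(prev);
	}
	if (next) {
		if (next->inserted)
			piece_unlink(next);
		piece_free(next);
	}
}

int main(void)
{
	struct piece prev = { .payload = malloc(16), .inserted = true };

	/* Pretend the overall bind failed after prev was committed. */
	unwind_remap(&prev, NULL);
	return 0;
}
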
2466 err = check_ufence(gpuva_to_vma(op->base.remap.unmap->va));
2471 gpuva_to_vma(op->base.remap.unmap->va),
2473 if (!err && op->remap.prev)
2474 err = vma_lock_and_validate(exec, op->remap.prev, true);
2475 if (!err && op->remap.next)
2476 err = vma_lock_and_validate(exec, op->remap.next, true);
2542 trace_xe_vma_unbind(gpuva_to_vma(op->base.remap.unmap->va));
2543 if (op->remap.prev)
2544 trace_xe_vma_bind(op->remap.prev);
2545 if (op->remap.next)
2546 trace_xe_vma_bind(op->remap.next);
2695 if (op->remap.prev)
2696 vma_add_ufence(op->remap.prev, ufence);
2697 if (op->remap.next)
2698 vma_add_ufence(op->remap.next, ufence);
2726 xe_vma_destroy(gpuva_to_vma(op->base.remap.unmap->va),