Lines matching full:cursor in drivers/gpu/drm/amd/amdgpu/amdgpu_vm_pt.c

152  * @cursor: state to initialize
158 struct amdgpu_vm_pt_cursor *cursor) in amdgpu_vm_pt_start() argument
160 cursor->pfn = start; in amdgpu_vm_pt_start()
161 cursor->parent = NULL; in amdgpu_vm_pt_start()
162 cursor->entry = &vm->root; in amdgpu_vm_pt_start()
163 cursor->level = adev->vm_manager.root_level; in amdgpu_vm_pt_start()
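
As a reading aid, a simplified model of the cursor state these assignments initialize. Field meanings are inferred from the matches above; this is not the kernel's struct amdgpu_vm_pt_cursor definition.

#include <stdint.h>

/* Simplified model only; the real structure lives in the amdgpu driver. */
struct pt_cursor_model {
	uint64_t pfn;        /* first page frame the cursor covers              */
	void *parent;        /* page-table entry one level up, NULL at the root */
	void *entry;         /* current PD/PT entry (&vm->root at the start)    */
	unsigned int level;  /* current tree level, starts at the root level    */
};
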
170 * @cursor: current state
177 struct amdgpu_vm_pt_cursor *cursor) in amdgpu_vm_pt_descendant() argument
181 if ((cursor->level == AMDGPU_VM_PTB) || !cursor->entry || in amdgpu_vm_pt_descendant()
182 !cursor->entry->bo) in amdgpu_vm_pt_descendant()
185 mask = amdgpu_vm_pt_entries_mask(adev, cursor->level); in amdgpu_vm_pt_descendant()
186 shift = amdgpu_vm_pt_level_shift(adev, cursor->level); in amdgpu_vm_pt_descendant()
188 ++cursor->level; in amdgpu_vm_pt_descendant()
189 idx = (cursor->pfn >> shift) & mask; in amdgpu_vm_pt_descendant()
190 cursor->parent = cursor->entry; in amdgpu_vm_pt_descendant()
191 cursor->entry = &to_amdgpu_bo_vm(cursor->entry->bo)->entries[idx]; in amdgpu_vm_pt_descendant()
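
For illustration, the child-index computation above with made-up numbers: 512 entries per level and an 18-bit level shift (both are assumptions for this example, not values taken from the driver).

#include <assert.h>
#include <stdint.h>

int main(void)
{
	uint64_t pfn = 0x12345678;
	unsigned int shift = 18;                /* assumed level shift          */
	uint32_t mask = 512 - 1;                /* assumed entries mask (0x1ff) */

	uint32_t idx = (pfn >> shift) & mask;   /* child slot in the parent     */

	assert(idx == 141);                     /* (0x12345678 >> 18) & 0x1ff   */
	return 0;
}
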
199 * @cursor: current state
206 struct amdgpu_vm_pt_cursor *cursor) in amdgpu_vm_pt_sibling() argument
213 if (!cursor->parent) in amdgpu_vm_pt_sibling()
217 shift = amdgpu_vm_pt_level_shift(adev, cursor->level - 1); in amdgpu_vm_pt_sibling()
218 num_entries = amdgpu_vm_pt_num_entries(adev, cursor->level - 1); in amdgpu_vm_pt_sibling()
219 parent = to_amdgpu_bo_vm(cursor->parent->bo); in amdgpu_vm_pt_sibling()
221 if (cursor->entry == &parent->entries[num_entries - 1]) in amdgpu_vm_pt_sibling()
224 cursor->pfn += 1ULL << shift; in amdgpu_vm_pt_sibling()
225 cursor->pfn &= ~((1ULL << shift) - 1); in amdgpu_vm_pt_sibling()
226 ++cursor->entry; in amdgpu_vm_pt_sibling()
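
The two pfn operations above move the cursor to the first page frame of the next sibling entry. A worked example with the same assumed 18-bit entry span as before:

#include <assert.h>
#include <stdint.h>

int main(void)
{
	uint64_t pfn = 0x12345678;
	unsigned int shift = 18;              /* assumed parent-level shift  */

	pfn += 1ULL << shift;                 /* step past the current entry */
	pfn &= ~((1ULL << shift) - 1);        /* align to the entry boundary */

	assert(pfn == 0x12380000);            /* start of the next sibling   */
	return 0;
}
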
233 * @cursor: current state
239 static bool amdgpu_vm_pt_ancestor(struct amdgpu_vm_pt_cursor *cursor) in amdgpu_vm_pt_ancestor() argument
241 if (!cursor->parent) in amdgpu_vm_pt_ancestor()
244 --cursor->level; in amdgpu_vm_pt_ancestor()
245 cursor->entry = cursor->parent; in amdgpu_vm_pt_ancestor()
246 cursor->parent = amdgpu_vm_pt_parent(cursor->parent); in amdgpu_vm_pt_ancestor()
254 * @cursor: current state
259 struct amdgpu_vm_pt_cursor *cursor) in amdgpu_vm_pt_next() argument
262 if (amdgpu_vm_pt_descendant(adev, cursor)) in amdgpu_vm_pt_next()
266 while (!amdgpu_vm_pt_sibling(adev, cursor)) { in amdgpu_vm_pt_next()
268 if (!amdgpu_vm_pt_ancestor(cursor)) { in amdgpu_vm_pt_next()
269 cursor->pfn = ~0ll; in amdgpu_vm_pt_next()
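
To make the traversal order concrete, here is a small self-contained model of the walk these four helpers implement: descend when possible, otherwise step to a sibling, climbing toward the root until one exists, and finish by setting pfn to ~0. It is an illustration only: the tree is implicit and fully populated, the fan-out (4) and depth (3 levels) are made up, and the cursor tracks just (pfn, level) instead of the kernel's entry pointers.

#include <stdint.h>
#include <stdio.h>

#define LEVELS 3	/* root = 0, leaf (PTB-like) = LEVELS - 1 */
#define BITS   2	/* 2 bits per level -> fan-out of 4       */

struct toy_cursor {
	uint64_t pfn;
	unsigned int level;
};

/* Number of pfn bits one entry of a node at @level covers. */
static unsigned int level_shift(unsigned int level)
{
	return (LEVELS - 1 - level) * BITS;
}

static int toy_descendant(struct toy_cursor *c)
{
	if (c->level == LEVELS - 1)
		return 0;			/* already at a leaf */
	++c->level;
	return 1;
}

static int toy_sibling(struct toy_cursor *c)
{
	unsigned int shift;

	if (c->level == 0)
		return 0;			/* the root has no siblings */

	/* Step size: the range one entry of the parent covers. */
	shift = level_shift(c->level - 1);
	if (((c->pfn >> shift) & ((1u << BITS) - 1)) == (1u << BITS) - 1)
		return 0;			/* last entry of the parent */

	c->pfn += 1ULL << shift;
	c->pfn &= ~((1ULL << shift) - 1);
	return 1;
}

static int toy_ancestor(struct toy_cursor *c)
{
	if (c->level == 0)
		return 0;
	--c->level;
	return 1;
}

static void toy_next(struct toy_cursor *c)
{
	if (toy_descendant(c))
		return;
	while (!toy_sibling(c)) {
		if (!toy_ancestor(c)) {
			c->pfn = ~0ull;		/* walk finished */
			return;
		}
	}
}

int main(void)
{
	struct toy_cursor c = { .pfn = 0, .level = 0 };

	/* Pre-order walk: each node is printed before its children. */
	while (c.pfn != ~0ull) {
		printf("level %u pfn %llu\n", c.level, (unsigned long long)c.pfn);
		toy_next(&c);
	}
	return 0;
}
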
280 * @start: optional cursor to start with
281 * @cursor: state to initialize
288 struct amdgpu_vm_pt_cursor *cursor) in amdgpu_vm_pt_first_dfs() argument
291 *cursor = *start; in amdgpu_vm_pt_first_dfs()
293 amdgpu_vm_pt_start(adev, vm, 0, cursor); in amdgpu_vm_pt_first_dfs()
295 while (amdgpu_vm_pt_descendant(adev, cursor)) in amdgpu_vm_pt_first_dfs()
318 * @cursor: current state
320  * Move the cursor to the next node in a depth-first search.
323 struct amdgpu_vm_pt_cursor *cursor) in amdgpu_vm_pt_next_dfs() argument
325 if (!cursor->entry) in amdgpu_vm_pt_next_dfs()
328 if (!cursor->parent) in amdgpu_vm_pt_next_dfs()
329 cursor->entry = NULL; in amdgpu_vm_pt_next_dfs()
330 else if (amdgpu_vm_pt_sibling(adev, cursor)) in amdgpu_vm_pt_next_dfs()
331 while (amdgpu_vm_pt_descendant(adev, cursor)) in amdgpu_vm_pt_next_dfs()
334 amdgpu_vm_pt_ancestor(cursor); in amdgpu_vm_pt_next_dfs()
340 #define for_each_amdgpu_vm_pt_dfs_safe(adev, vm, start, cursor, entry) \ argument
341 for (amdgpu_vm_pt_first_dfs((adev), (vm), (start), &(cursor)), \
342 (entry) = (cursor).entry, amdgpu_vm_pt_next_dfs((adev), &(cursor));\
344 (entry) = (cursor).entry, amdgpu_vm_pt_next_dfs((adev), &(cursor)))
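
The macro above yields a post-order walk: amdgpu_vm_pt_first_dfs drops to the deepest entry, and entry is captured before the cursor advances, so the node being visited may safely be freed. A sketch of typical usage, modeled on the amdgpu_vm_pt_free_root and amdgpu_vm_pt_map_tables matches below; adev and vm are assumed to be in scope and the loop body is a placeholder, not kernel code.

struct amdgpu_vm_pt_cursor cursor;
struct amdgpu_vm_bo_base *entry;

for_each_amdgpu_vm_pt_dfs_safe(adev, vm, NULL, cursor, entry) {
	if (!entry->bo)
		continue;
	/* entry is visited after all of its children, so tearing the
	 * sub-tree down from here is safe. */
}
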
484 * @cursor: Which page table to allocate
495 struct amdgpu_vm_pt_cursor *cursor, in amdgpu_vm_pt_alloc() argument
498 struct amdgpu_vm_bo_base *entry = cursor->entry; in amdgpu_vm_pt_alloc()
507 r = amdgpu_vm_pt_create(adev, vm, cursor->level, immediate, &pt, in amdgpu_vm_pt_alloc()
517 pt_bo->parent = amdgpu_bo_ref(cursor->parent->bo); in amdgpu_vm_pt_alloc()
604  * @cursor: first PT entry to start the depth-first search from, non-NULL
609 struct amdgpu_vm_pt_cursor *cursor) in amdgpu_vm_pt_add_list() argument
615 for_each_amdgpu_vm_pt_dfs_safe(params->adev, params->vm, cursor, seek, entry) { in amdgpu_vm_pt_add_list()
621 list_move(&cursor->entry->vm_status, &params->tlb_flush_waitlist); in amdgpu_vm_pt_add_list()
634 struct amdgpu_vm_pt_cursor cursor; in amdgpu_vm_pt_free_root() local
637 for_each_amdgpu_vm_pt_dfs_safe(adev, vm, NULL, cursor, entry) { in amdgpu_vm_pt_free_root()
824 struct amdgpu_vm_pt_cursor cursor; in amdgpu_vm_ptes_update() local
834 amdgpu_vm_pt_start(adev, params->vm, start, &cursor); in amdgpu_vm_ptes_update()
835 while (cursor.pfn < end) { in amdgpu_vm_ptes_update()
845 &cursor, params->immediate); in amdgpu_vm_ptes_update()
850 shift = amdgpu_vm_pt_level_shift(adev, cursor.level); in amdgpu_vm_ptes_update()
851 parent_shift = amdgpu_vm_pt_level_shift(adev, cursor.level - 1); in amdgpu_vm_ptes_update()
854 if (amdgpu_vm_pt_descendant(adev, &cursor)) in amdgpu_vm_ptes_update()
859 if (cursor.level != AMDGPU_VM_PTB) { in amdgpu_vm_ptes_update()
860 if (!amdgpu_vm_pt_descendant(adev, &cursor)) in amdgpu_vm_ptes_update()
869 if (amdgpu_vm_pt_descendant(adev, &cursor)) in amdgpu_vm_ptes_update()
875 if (!amdgpu_vm_pt_ancestor(&cursor)) in amdgpu_vm_ptes_update()
880 pt = cursor.entry->bo; in amdgpu_vm_ptes_update()
889 if (!amdgpu_vm_pt_ancestor(&cursor)) in amdgpu_vm_ptes_update()
892 pt = cursor.entry->bo; in amdgpu_vm_ptes_update()
900 mask = amdgpu_vm_pt_entries_mask(adev, cursor.level); in amdgpu_vm_ptes_update()
901 pe_start = ((cursor.pfn >> shift) & mask) * 8; in amdgpu_vm_ptes_update()
903 entry_end += cursor.pfn & ~(entry_end - 1); in amdgpu_vm_ptes_update()
923 cursor.level, pe_start, dst, in amdgpu_vm_ptes_update()
939 if (amdgpu_vm_pt_descendant(adev, &cursor)) { in amdgpu_vm_ptes_update()
946 while (cursor.pfn < frag_start) { in amdgpu_vm_ptes_update()
948 if (cursor.entry->bo) { in amdgpu_vm_ptes_update()
950 amdgpu_vm_pt_add_list(params, &cursor); in amdgpu_vm_ptes_update()
952 amdgpu_vm_pt_next(adev, &cursor); in amdgpu_vm_ptes_update()
957 amdgpu_vm_pt_next(adev, &cursor); in amdgpu_vm_ptes_update()
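
As a worked example of the offset math above (pe_start and entry_end), assuming for illustration 8-byte PTEs, 512 entries per PTB and a PTB-level shift of 0; these values are assumptions for the example, not taken from the driver.

#include <assert.h>
#include <stdint.h>

int main(void)
{
	uint64_t pfn = 141;                    /* page frame inside the PTB */
	unsigned int shift = 0;                /* assumed PTB-level shift   */
	uint32_t mask = 512 - 1;               /* assumed entries mask      */

	/* Byte offset of the first PTE to write inside the page table. */
	uint64_t pe_start = ((pfn >> shift) & mask) * 8;

	/* First pfn past the range this page table covers. */
	uint64_t entry_end = 512ull << shift;
	entry_end += pfn & ~(entry_end - 1);

	assert(pe_start == 141 * 8);
	assert(entry_end == 512);
	return 0;
}
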
973 struct amdgpu_vm_pt_cursor cursor; in amdgpu_vm_pt_map_tables() local
976 for_each_amdgpu_vm_pt_dfs_safe(adev, vm, NULL, cursor, entry) { in amdgpu_vm_pt_map_tables()