Lines matching refs: vma

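(The hits below come from a cross-reference query on the identifier vma; judging by the function names, the file is the i915 GEM GTT selftests, presumably drivers/gpu/drm/i915/selftests/i915_gem_gtt.c. The leading number on each line is that file's own line number, the trailing annotation names the enclosing function, and "local"/"argument" mark where vma is declared. Multi-line statements appear truncated because only the matching line is captured.)
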
398 struct i915_vma *vma; in close_object_list() local
400 vma = i915_vma_instance(obj, vm, NULL); in close_object_list()
401 if (!IS_ERR(vma)) in close_object_list()
402 ignored = i915_vma_unbind_unlocked(vma); in close_object_list()

421 struct i915_vma *vma; in fill_hole() local
463 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
464 if (IS_ERR(vma)) in fill_hole()
473 err = i915_vma_pin(vma, 0, 0, offset | flags); in fill_hole()
480 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
481 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
483 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
489 i915_vma_unpin(vma); in fill_hole()
503 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
504 if (IS_ERR(vma)) in fill_hole()
513 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
514 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
516 __func__, p->name, vma->node.start, vma->node.size, in fill_hole()
522 err = i915_vma_unbind_unlocked(vma); in fill_hole()
525 __func__, p->name, vma->node.start, vma->node.size, in fill_hole()
542 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
543 if (IS_ERR(vma)) in fill_hole()
552 err = i915_vma_pin(vma, 0, 0, offset | flags); in fill_hole()
559 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
560 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
562 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
568 i915_vma_unpin(vma); in fill_hole()
582 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
583 if (IS_ERR(vma)) in fill_hole()
592 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
593 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
595 __func__, p->name, vma->node.start, vma->node.size, drm_mm_node_allocated(&vma->node), in fill_hole()
601 err = i915_vma_unbind_unlocked(vma); in fill_hole()
604 __func__, p->name, vma->node.start, vma->node.size, in fill_hole()
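
fill_hole() above, and walk_hole(), pot_hole() and drunk_hole() below, all iterate one pattern: look up the vma for an object in the vm, pin it at an explicit offset, verify drm_mm placed it exactly there, unpin, and unbind so the hole can be refilled in a different order. A minimal sketch of one iteration, using only calls visible in this listing (check_pin_at is a hypothetical helper, not part of the selftests):

/*
 * Illustrative sketch only: condenses the pin/verify/unpin/unbind
 * cycle seen in fill_hole() above and repeated by walk_hole(),
 * pot_hole() and drunk_hole() below. Error reporting trimmed.
 */
static int check_pin_at(struct drm_i915_gem_object *obj,
			struct i915_address_space *vm,
			u64 offset, u64 flags)
{
	struct i915_vma *vma;
	int err;

	vma = i915_vma_instance(obj, vm, NULL);
	if (IS_ERR(vma))
		return PTR_ERR(vma);

	/* Bind at the requested GTT offset. */
	err = i915_vma_pin(vma, 0, 0, offset | flags);
	if (err)
		return err;

	/* The node must exist and sit exactly where we asked for it. */
	if (!drm_mm_node_allocated(&vma->node) ||
	    i915_vma_misplaced(vma, 0, 0, offset | flags))
		err = -EINVAL;

	i915_vma_unpin(vma);
	if (err)
		return err;

	/* Evict again so the hole can be refilled by the next pass. */
	return i915_vma_unbind_unlocked(vma);
}
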
656 struct i915_vma *vma; in walk_hole() local
664 vma = i915_vma_instance(obj, vm, NULL); in walk_hole()
665 if (IS_ERR(vma)) { in walk_hole()
666 err = PTR_ERR(vma); in walk_hole()
673 err = i915_vma_pin(vma, 0, 0, addr | flags); in walk_hole()
676 __func__, addr, vma->size, in walk_hole()
680 i915_vma_unpin(vma); in walk_hole()
682 if (!drm_mm_node_allocated(&vma->node) || in walk_hole()
683 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in walk_hole()
685 __func__, addr, vma->size); in walk_hole()
690 err = i915_vma_unbind_unlocked(vma); in walk_hole()
693 __func__, addr, vma->size, err); in walk_hole()
697 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in walk_hole()

723 struct i915_vma *vma; in pot_hole() local
739 vma = i915_vma_instance(obj, vm, NULL); in pot_hole()
740 if (IS_ERR(vma)) { in pot_hole()
741 err = PTR_ERR(vma); in pot_hole()
755 err = i915_vma_pin(vma, 0, 0, addr | flags); in pot_hole()
765 if (!drm_mm_node_allocated(&vma->node) || in pot_hole()
766 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in pot_hole()
768 __func__, addr, vma->size); in pot_hole()
769 i915_vma_unpin(vma); in pot_hole()
770 err = i915_vma_unbind_unlocked(vma); in pot_hole()
775 i915_vma_unpin(vma); in pot_hole()
776 err = i915_vma_unbind_unlocked(vma); in pot_hole()

812 struct i915_vma *vma; in drunk_hole() local
849 vma = i915_vma_instance(obj, vm, NULL); in drunk_hole()
850 if (IS_ERR(vma)) { in drunk_hole()
851 err = PTR_ERR(vma); in drunk_hole()
855 GEM_BUG_ON(vma->size != BIT_ULL(size)); in drunk_hole()
860 err = i915_vma_pin(vma, 0, 0, addr | flags); in drunk_hole()
870 if (!drm_mm_node_allocated(&vma->node) || in drunk_hole()
871 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in drunk_hole()
874 i915_vma_unpin(vma); in drunk_hole()
875 err = i915_vma_unbind_unlocked(vma); in drunk_hole()
880 i915_vma_unpin(vma); in drunk_hole()
881 err = i915_vma_unbind_unlocked(vma); in drunk_hole()

920 struct i915_vma *vma; in __shrink_hole() local
932 vma = i915_vma_instance(obj, vm, NULL); in __shrink_hole()
933 if (IS_ERR(vma)) { in __shrink_hole()
934 err = PTR_ERR(vma); in __shrink_hole()
938 GEM_BUG_ON(vma->size != size); in __shrink_hole()
940 err = i915_vma_pin(vma, 0, 0, addr | flags); in __shrink_hole()
947 if (!drm_mm_node_allocated(&vma->node) || in __shrink_hole()
948 i915_vma_misplaced(vma, 0, 0, addr | flags)) { in __shrink_hole()
951 i915_vma_unpin(vma); in __shrink_hole()
952 err = i915_vma_unbind_unlocked(vma); in __shrink_hole()
957 i915_vma_unpin(vma); in __shrink_hole()
965 err = i915_vma_sync(vma); in __shrink_hole()
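
__shrink_hole() differs from the *_hole() tests in its tail: on success it keeps the binding and calls i915_vma_sync() (source line 965) to wait out any asynchronous bind before the next iteration. A hedged condensation of one iteration (shrink_step is an illustrative name, not the selftest's; error reporting trimmed):

/* Illustrative condensation of one __shrink_hole() loop step. */
static int shrink_step(struct i915_vma *vma, u64 addr, u64 flags)
{
	int err;

	err = i915_vma_pin(vma, 0, 0, addr | flags);
	if (err)
		return err;

	/* Placement must match the request exactly. */
	if (!drm_mm_node_allocated(&vma->node) ||
	    i915_vma_misplaced(vma, 0, 0, addr | flags)) {
		i915_vma_unpin(vma);
		err = i915_vma_unbind_unlocked(vma);
		return err ?: -EINVAL;
	}

	i915_vma_unpin(vma);

	/* Keep the binding, but wait for any async bind to complete. */
	return i915_vma_sync(vma);
}
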
1024 struct i915_vma *vma; in shrink_boom() local
1030 vma = i915_vma_instance(purge, vm, NULL); in shrink_boom()
1031 if (IS_ERR(vma)) { in shrink_boom()
1032 err = PTR_ERR(vma); in shrink_boom()
1036 err = i915_vma_pin(vma, 0, 0, flags); in shrink_boom()
1041 i915_vma_unpin(vma); in shrink_boom()
1053 vma = i915_vma_instance(explode, vm, NULL); in shrink_boom()
1054 if (IS_ERR(vma)) { in shrink_boom()
1055 err = PTR_ERR(vma); in shrink_boom()
1059 err = i915_vma_pin(vma, 0, 0, flags | size); in shrink_boom()
1063 i915_vma_unpin(vma); in shrink_boom()

1086 struct i915_vma *vma; in misaligned_case() local
1100 vma = i915_vma_instance(obj, vm, NULL); in misaligned_case()
1101 if (IS_ERR(vma)) { in misaligned_case()
1102 err = PTR_ERR(vma); in misaligned_case()
1106 err = i915_vma_pin(vma, 0, 0, addr | flags); in misaligned_case()
1109 i915_vma_unpin(vma); in misaligned_case()
1111 if (!drm_mm_node_allocated(&vma->node)) { in misaligned_case()
1116 if (i915_vma_misplaced(vma, 0, 0, addr | flags)) { in misaligned_case()
1121 expected_vma_size = round_up(size, 1 << (ffs(vma->resource->page_sizes_gtt) - 1)); in misaligned_case()
1129 if (vma->size != expected_vma_size || vma->node.size != expected_node_size) { in misaligned_case()
1130 err = i915_vma_unbind_unlocked(vma); in misaligned_case()
1135 err = i915_vma_unbind_unlocked(vma); in misaligned_case()
1139 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in misaligned_case()
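
The expected-size computation at source line 1121 leans on a lowest-set-bit trick: ffs() returns the 1-based index of the least significant set bit, so 1 << (ffs(mask) - 1) is the smallest GTT page size the binding actually used, and vma->size is expected to be the object size rounded up to that granule. (How expected_node_size is derived is not visible in this listing.) An illustrative fragment, with vma and size standing in for misaligned_case()'s locals:

/* Smallest page size used == lowest set bit of the page-size mask. */
u64 granule = 1 << (ffs(vma->resource->page_sizes_gtt) - 1);
u64 expected_vma_size = round_up(size, granule);
/* e.g. page_sizes_gtt == SZ_64K | SZ_4K: ffs() == 13, granule == 4K,
 * so a 5000-byte object gives expected_vma_size == 8192. */
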
1439 static void track_vma_bind(struct i915_vma *vma) in track_vma_bind() argument
1441 struct drm_i915_gem_object *obj = vma->obj; in track_vma_bind()
1445 GEM_BUG_ON(atomic_read(&vma->pages_count)); in track_vma_bind()
1446 atomic_set(&vma->pages_count, I915_VMA_PAGES_ACTIVE); in track_vma_bind()
1448 vma->pages = obj->mm.pages; in track_vma_bind()
1449 vma->resource->bi.pages = vma->pages; in track_vma_bind()
1451 mutex_lock(&vma->vm->mutex); in track_vma_bind()
1452 list_move_tail(&vma->vm_link, &vma->vm->bound_list); in track_vma_bind()
1453 mutex_unlock(&vma->vm->mutex); in track_vma_bind()

1507 static int reserve_gtt_with_resource(struct i915_vma *vma, u64 offset) in reserve_gtt_with_resource() argument
1509 struct i915_address_space *vm = vma->vm; in reserve_gtt_with_resource()
1511 struct drm_i915_gem_object *obj = vma->obj; in reserve_gtt_with_resource()
1519 err = i915_gem_gtt_reserve(vm, NULL, &vma->node, obj->base.size, in reserve_gtt_with_resource()
1524 i915_vma_resource_init_from_vma(vma_res, vma); in reserve_gtt_with_resource()
1525 vma->resource = vma_res; in reserve_gtt_with_resource()

1553 struct i915_vma *vma; in igt_gtt_reserve() local
1569 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_reserve()
1570 if (IS_ERR(vma)) { in igt_gtt_reserve()
1571 err = PTR_ERR(vma); in igt_gtt_reserve()
1575 err = reserve_gtt_with_resource(vma, total); in igt_gtt_reserve()
1581 track_vma_bind(vma); in igt_gtt_reserve()
1583 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_reserve()
1584 if (vma->node.start != total || in igt_gtt_reserve()
1585 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1587 vma->node.start, vma->node.size, in igt_gtt_reserve()
1598 struct i915_vma *vma; in igt_gtt_reserve() local
1615 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_reserve()
1616 if (IS_ERR(vma)) { in igt_gtt_reserve()
1617 err = PTR_ERR(vma); in igt_gtt_reserve()
1621 err = reserve_gtt_with_resource(vma, total); in igt_gtt_reserve()
1627 track_vma_bind(vma); in igt_gtt_reserve()
1629 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_reserve()
1630 if (vma->node.start != total || in igt_gtt_reserve()
1631 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1633 vma->node.start, vma->node.size, in igt_gtt_reserve()
1642 struct i915_vma *vma; in igt_gtt_reserve() local
1645 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_reserve()
1646 if (IS_ERR(vma)) { in igt_gtt_reserve()
1647 err = PTR_ERR(vma); in igt_gtt_reserve()
1651 err = i915_vma_unbind_unlocked(vma); in igt_gtt_reserve()
1662 err = reserve_gtt_with_resource(vma, offset); in igt_gtt_reserve()
1668 track_vma_bind(vma); in igt_gtt_reserve()
1670 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_reserve()
1671 if (vma->node.start != offset || in igt_gtt_reserve()
1672 vma->node.size != 2*I915_GTT_PAGE_SIZE) { in igt_gtt_reserve()
1674 vma->node.start, vma->node.size, in igt_gtt_reserve()
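
igt_gtt_reserve() runs under a mock GGTT, so nothing is really bound: reserve_gtt_with_resource() (source lines 1507-1525) wraps i915_gem_gtt_reserve() and attaches a fresh vma resource, and track_vma_bind() (source lines 1439-1453) then fakes the binding by marking the vma's pages active, pointing vma->pages at obj->mm.pages, and moving the vma onto the vm's bound_list. One step of the test, condensed into a hypothetical helper (error reporting trimmed):

/* Illustrative condensation of one igt_gtt_reserve() step. */
static int reserve_one(struct drm_i915_gem_object *obj,
		       struct i915_ggtt *ggtt, u64 total)
{
	struct i915_vma *vma;
	int err;

	vma = i915_vma_instance(obj, &ggtt->vm, NULL);
	if (IS_ERR(vma))
		return PTR_ERR(vma);

	/* Place the node at 'total' via i915_gem_gtt_reserve() + vma_res. */
	err = reserve_gtt_with_resource(vma, total);
	if (err)
		return err;

	/* Mock vm: mark pages active and move vma to vm->bound_list. */
	track_vma_bind(vma);

	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
	if (vma->node.start != total ||
	    vma->node.size != 2 * I915_GTT_PAGE_SIZE)
		return -EINVAL; /* the real test pr_err()s the mismatch */

	return 0;
}
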
1689 static int insert_gtt_with_resource(struct i915_vma *vma) in insert_gtt_with_resource() argument
1691 struct i915_address_space *vm = vma->vm; in insert_gtt_with_resource()
1693 struct drm_i915_gem_object *obj = vma->obj; in insert_gtt_with_resource()
1701 err = i915_gem_gtt_insert(vm, NULL, &vma->node, obj->base.size, 0, in insert_gtt_with_resource()
1704 i915_vma_resource_init_from_vma(vma_res, vma); in insert_gtt_with_resource()
1705 vma->resource = vma_res; in insert_gtt_with_resource()

1776 struct i915_vma *vma; in igt_gtt_insert() local
1793 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1794 if (IS_ERR(vma)) { in igt_gtt_insert()
1795 err = PTR_ERR(vma); in igt_gtt_insert()
1799 err = insert_gtt_with_resource(vma); in igt_gtt_insert()
1810 track_vma_bind(vma); in igt_gtt_insert()
1811 __i915_vma_pin(vma); in igt_gtt_insert()
1813 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
1817 struct i915_vma *vma; in igt_gtt_insert() local
1819 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1820 if (IS_ERR(vma)) { in igt_gtt_insert()
1821 err = PTR_ERR(vma); in igt_gtt_insert()
1825 if (!drm_mm_node_allocated(&vma->node)) { in igt_gtt_insert()
1831 __i915_vma_unpin(vma); in igt_gtt_insert()
1836 struct i915_vma *vma; in igt_gtt_insert() local
1839 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1840 if (IS_ERR(vma)) { in igt_gtt_insert()
1841 err = PTR_ERR(vma); in igt_gtt_insert()
1845 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
1846 offset = vma->node.start; in igt_gtt_insert()
1848 err = i915_vma_unbind_unlocked(vma); in igt_gtt_insert()
1854 err = insert_gtt_with_resource(vma); in igt_gtt_insert()
1860 track_vma_bind(vma); in igt_gtt_insert()
1862 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
1863 if (vma->node.start != offset) { in igt_gtt_insert()
1865 offset, vma->node.start); in igt_gtt_insert()
1875 struct i915_vma *vma; in igt_gtt_insert() local
1892 vma = i915_vma_instance(obj, &ggtt->vm, NULL); in igt_gtt_insert()
1893 if (IS_ERR(vma)) { in igt_gtt_insert()
1894 err = PTR_ERR(vma); in igt_gtt_insert()
1898 err = insert_gtt_with_resource(vma); in igt_gtt_insert()
1904 track_vma_bind(vma); in igt_gtt_insert()
1906 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in igt_gtt_insert()
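
igt_gtt_insert() mirrors the reserve test but lets drm_mm choose the offset: insert_gtt_with_resource() (source lines 1689-1705) wraps i915_gem_gtt_insert() the same way. Its replace phase records where a vma landed, unbinds it, inserts again, and expects the same hole back. A condensed sketch of that phase (helper name is illustrative; error reporting trimmed):

/* Illustrative condensation of igt_gtt_insert()'s replace step. */
static int reinsert_same_offset(struct i915_vma *vma)
{
	u64 offset = vma->node.start;
	int err;

	err = i915_vma_unbind_unlocked(vma);
	if (err)
		return err;

	/* Let drm_mm place the node again via i915_gem_gtt_insert(). */
	err = insert_gtt_with_resource(vma);
	if (err)
		return err;

	/* Mock vm: fake the binding as in the reserve test. */
	track_vma_bind(vma);

	GEM_BUG_ON(!drm_mm_node_allocated(&vma->node));
	if (vma->node.start != offset)
		return -EINVAL; /* real test pr_err()s old vs new offset */

	return 0;
}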