/linux/drivers/gpu/drm/xe/
xe_ggtt.c
     78  struct xe_ggtt *ggtt;  member
    124  static void ggtt_update_access_counter(struct xe_ggtt *ggtt)  in ggtt_update_access_counter() argument
    126  struct xe_tile *tile = ggtt->tile;  in ggtt_update_access_counter()
    149  lockdep_assert_held(&ggtt->lock);  in ggtt_update_access_counter()
    151  if ((++ggtt->access_count % max_gtt_writes) == 0) {  in ggtt_update_access_counter()
    153  ggtt->access_count = 0;  in ggtt_update_access_counter()
    163  u64 xe_ggtt_start(struct xe_ggtt *ggtt)  in xe_ggtt_start() argument
    165  return ggtt->start;  in xe_ggtt_start()
    174  u64 xe_ggtt_size(struct xe_ggtt *ggtt)  in xe_ggtt_size() argument
    176  return ggtt->size;  in xe_ggtt_size()
    [all …]
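The xe_ggtt.c hits cluster around one small pattern: each GGTT PTE write bumps an access counter under ggtt->lock, and every max_gtt_writes-th write does extra work before the counter is reset. Below is a minimal user-space model of that counting discipline; the toy_* names, the flush action and the 1024 threshold are invented for the sketch and are not taken from the driver.

    #include <pthread.h>
    #include <stdio.h>

    #define MAX_GTT_WRITES 1024              /* assumed threshold, not the driver's */

    struct toy_ggtt {
        pthread_mutex_t lock;                /* stands in for ggtt->lock */
        unsigned int access_count;           /* writes since the last flush */
    };

    static void toy_flush(struct toy_ggtt *ggtt)
    {
        /* In the driver this is where the periodic extra work would happen. */
        printf("flush after %u writes\n", ggtt->access_count);
    }

    static void toy_update_access_counter(struct toy_ggtt *ggtt)
    {
        /* Caller holds ggtt->lock, mirroring the lockdep_assert_held() above. */
        if ((++ggtt->access_count % MAX_GTT_WRITES) == 0) {
            toy_flush(ggtt);
            ggtt->access_count = 0;
        }
    }

    int main(void)
    {
        struct toy_ggtt ggtt = { .lock = PTHREAD_MUTEX_INITIALIZER };
        int i;

        for (i = 0; i < 3000; i++) {         /* model a burst of PTE writes */
            pthread_mutex_lock(&ggtt.lock);
            toy_update_access_counter(&ggtt);
            pthread_mutex_unlock(&ggtt.lock);
        }
        return 0;
    }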
xe_ggtt_types.h
     60  typedef void (*xe_ggtt_set_pte_fn)(struct xe_ggtt *ggtt, u64 addr, u64 pte);
     61  typedef void (*xe_ggtt_transform_cb)(struct xe_ggtt *ggtt,
     77  u64 (*ggtt_get_pte)(struct xe_ggtt *ggtt, u64 addr);
/linux/drivers/gpu/drm/i915/selftests/
i915_gem_evict.c
     48  static int populate_ggtt(struct i915_ggtt *ggtt, struct list_head *objects)  in populate_ggtt() argument
     57  obj = i915_gem_object_create_internal(ggtt->vm.i915,  in populate_ggtt()
     75  count, ggtt->vm.total / PAGE_SIZE);  in populate_ggtt()
     77  if (list_empty(&ggtt->vm.bound_list)) {  in populate_ggtt()
     85  static void unpin_ggtt(struct i915_ggtt *ggtt)  in unpin_ggtt() argument
     89  list_for_each_entry(vma, &ggtt->vm.bound_list, vm_link)  in unpin_ggtt()
     94  static void cleanup_objects(struct i915_ggtt *ggtt, struct list_head *list)  in cleanup_objects() argument
    104  i915_gem_drain_freed_objects(ggtt->vm.i915);  in cleanup_objects()
    110  struct i915_ggtt *ggtt = gt->ggtt;  in igt_evict_something() local
    116  err = populate_ggtt(ggtt, &objects);  in igt_evict_something()
    [all …]
mock_gtt.c
    109  struct i915_ggtt *ggtt = gt->ggtt;  in mock_init_ggtt() local
    111  ggtt->vm.gt = gt;  in mock_init_ggtt()
    112  ggtt->vm.i915 = gt->i915;  in mock_init_ggtt()
    113  ggtt->vm.is_ggtt = true;  in mock_init_ggtt()
    115  ggtt->gmadr = DEFINE_RES_MEM(0, 2048 * PAGE_SIZE);  in mock_init_ggtt()
    116  ggtt->mappable_end = resource_size(&ggtt->gmadr);  in mock_init_ggtt()
    117  ggtt->vm.total = 4096 * PAGE_SIZE;  in mock_init_ggtt()
    119  ggtt->vm.alloc_pt_dma = alloc_pt_dma;  in mock_init_ggtt()
    120  ggtt->vm.alloc_scratch_dma = alloc_pt_dma;  in mock_init_ggtt()
    122  ggtt->vm.clear_range = mock_clear_range;  in mock_init_ggtt()
    [all …]
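mock_init_ggtt() above illustrates the mock-address-space approach used by the selftests: the vm carries its operations as function pointers, so a test can plug in no-op implementations and exercise callers without real hardware. A stripped-down user-space model of that ops-table idea, with all toy_* names and sizes invented for the sketch:

    #include <stdint.h>
    #include <stdio.h>

    struct toy_vm {
        uint64_t total;                                   /* address-space size */
        void (*insert_page)(struct toy_vm *vm, uint64_t addr, uint64_t offset);
        void (*clear_range)(struct toy_vm *vm, uint64_t start, uint64_t length);
    };

    static void mock_insert_page(struct toy_vm *vm, uint64_t addr, uint64_t offset)
    {
        (void)vm; (void)addr; (void)offset;               /* deliberately a no-op */
    }

    static void mock_clear_range(struct toy_vm *vm, uint64_t start, uint64_t length)
    {
        (void)vm; (void)start; (void)length;              /* deliberately a no-op */
    }

    static void mock_init_vm(struct toy_vm *vm)
    {
        vm->total = 4096ull * 4096;                       /* 4096 pages of 4 KiB */
        vm->insert_page = mock_insert_page;
        vm->clear_range = mock_clear_range;
    }

    int main(void)
    {
        struct toy_vm vm;

        mock_init_vm(&vm);
        vm.insert_page(&vm, 0x1000, 0);                   /* callers stay unchanged */
        vm.clear_range(&vm, 0, vm.total);
        printf("mock vm of %llu bytes exercised\n", (unsigned long long)vm.total);
        return 0;
    }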
i915_gem_gtt.c
   1288  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in exercise_ggtt() local
   1295  list_sort(NULL, &ggtt->vm.mm.hole_stack, sort_holes);  in exercise_ggtt()
   1296  drm_mm_for_each_hole(node, &ggtt->vm.mm, hole_start, hole_end) {  in exercise_ggtt()
   1300  if (ggtt->vm.mm.color_adjust)  in exercise_ggtt()
   1301  ggtt->vm.mm.color_adjust(node, 0,  in exercise_ggtt()
   1306  err = func(&ggtt->vm, hole_start, hole_end, end_time);  in exercise_ggtt()
   1353  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in igt_ggtt_page() local
   1360  if (!i915_ggtt_has_aperture(ggtt))  in igt_ggtt_page()
   1372  mutex_lock(&ggtt->vm.mutex);  in igt_ggtt_page()
   1373  err = drm_mm_insert_node_in_range(&ggtt->vm.mm, &tmp,  in igt_ggtt_page()
    [all …]
mock_gtt.h
     33  void mock_fini_ggtt(struct i915_ggtt *ggtt);
mock_gem_device.c
     74  mock_fini_ggtt(to_gt(i915)->ggtt);  in mock_device_release()
    238  to_gt(i915)->vm = i915_vm_get(&to_gt(i915)->ggtt->vm);  in mock_gem_device()
/linux/drivers/gpu/drm/i915/gt/
intel_ggtt_fencing.c
     53  return fence->ggtt->vm.i915;  in fence_to_i915()
     58  return fence->ggtt->vm.gt->uncore;  in fence_to_uncore()
    205  struct i915_ggtt *ggtt = fence->ggtt;  in fence_update() local
    255  list_move(&fence->link, &ggtt->fence_list);  in fence_update()
    279  list_move_tail(&fence->link, &ggtt->fence_list);  in fence_update()
    331  static struct i915_fence_reg *fence_find(struct i915_ggtt *ggtt)  in fence_find() argument
    333  struct intel_display *display = ggtt->vm.i915->display;  in fence_find()
    337  list_for_each_entry_safe(fence, fn, &ggtt->fence_list, link) {  in fence_find()
    348  list_move_tail(&fence->link, &ggtt->fence_list);  in fence_find()
    367  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vma->vm);  in __i915_vma_pin_fence() local
    [all …]
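fence_find() above shows the fence-register reuse discipline: registers live on ggtt->fence_list in LRU order, the search skips entries that are still pinned, and the chosen entry is moved to the tail with list_move_tail() so it becomes the last candidate next time. A small self-contained model of that LRU walk; the toy_* names and the tiny intrusive list below merely stand in for the kernel's list_head helpers.

    #include <stdio.h>

    struct toy_fence {
        struct toy_fence *prev, *next;        /* intrusive list linkage */
        int id;
        int pin_count;                        /* >0 means still in active use */
    };

    /* Circular list with a dedicated head node, in the spirit of list_head. */
    static void list_init(struct toy_fence *head)
    {
        head->prev = head->next = head;
    }

    static void list_del(struct toy_fence *f)
    {
        f->prev->next = f->next;
        f->next->prev = f->prev;
    }

    static void list_add_tail(struct toy_fence *f, struct toy_fence *head)
    {
        f->prev = head->prev;
        f->next = head;
        head->prev->next = f;
        head->prev = f;
    }

    static struct toy_fence *toy_fence_find(struct toy_fence *head)
    {
        struct toy_fence *f;

        for (f = head->next; f != head; f = f->next) {
            if (f->pin_count)
                continue;                     /* busy, try the next one */
            list_del(f);                      /* the "list_move_tail": the */
            list_add_tail(f, head);           /* reused fence goes last    */
            return f;
        }
        return NULL;                          /* every fence is pinned */
    }

    int main(void)
    {
        struct toy_fence head, fences[3];
        struct toy_fence *f;
        int i;

        list_init(&head);
        for (i = 0; i < 3; i++) {
            fences[i].id = i;
            fences[i].pin_count = 0;
            list_add_tail(&fences[i], &head);
        }
        fences[0].pin_count = 1;              /* fence 0 is still in use */

        f = toy_fence_find(&head);
        printf("reused fence %d\n", f ? f->id : -1);    /* prints 1 */
        return 0;
    }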
selftest_reset.c
     22  struct i915_ggtt *ggtt = gt->ggtt;  in __igt_reset_stolen() local
     34  if (!drm_mm_node_allocated(&ggtt->error_capture))  in __igt_reset_stolen()
     87  ggtt->vm.insert_page(&ggtt->vm, dma,  in __igt_reset_stolen()
     88  ggtt->error_capture.start,  in __igt_reset_stolen()
     94  s = io_mapping_map_wc(&ggtt->iomap,  in __igt_reset_stolen()
     95  ggtt->error_capture.start,  in __igt_reset_stolen()
    111  ggtt->vm.clear_range(&ggtt->vm, ggtt->error_capture.start, PAGE_SIZE);  in __igt_reset_stolen()
    130  ggtt->vm.insert_page(&ggtt->vm, dma,  in __igt_reset_stolen()
    131  ggtt->error_capture.start,  in __igt_reset_stolen()
    137  s = io_mapping_map_wc(&ggtt->iomap,  in __igt_reset_stolen()
    [all …]
intel_ggtt_fencing.h
     24  struct i915_ggtt *ggtt;  member
     44  struct i915_fence_reg *i915_reserve_fence(struct i915_ggtt *ggtt);
     47  void intel_ggtt_restore_fences(struct i915_ggtt *ggtt);
     54  void intel_ggtt_init_fences(struct i915_ggtt *ggtt);
     55  void intel_ggtt_fini_fences(struct i915_ggtt *ggtt);
gen6_ppgtt.c
    166  gen6_ggtt_invalidate(ppgtt->base.vm.gt->ggtt);  in gen6_flush_pd()
    286  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);  in pd_vma_bind() local
    291  ppgtt->pd_addr = (gen6_pte_t __iomem *)ggtt->gsm + ggtt_offset;  in pd_vma_bind()
    382  struct i915_ggtt * const ggtt = ppgtt->base.vm.gt->ggtt;  in gen6_alloc_top_pd() local
    402  ppgtt->vma = i915_vma_instance(pd->pt.base, &ggtt->vm, NULL);  in gen6_alloc_top_pd()
    430  struct i915_ggtt * const ggtt = gt->ggtt;  in gen6_ppgtt_create() local
    452  ppgtt->base.vm.pte_encode = ggtt->vm.pte_encode;  in gen6_ppgtt_create()
intel_ggtt_gmch.h
     16  int intel_ggtt_gmch_probe(struct i915_ggtt *ggtt);
     23  static inline int intel_ggtt_gmch_probe(struct i915_ggtt *ggtt) { return -ENODEV; }  in intel_ggtt_gmch_probe() argument
intel_ring.c
    111  static struct i915_vma *create_ring_vma(struct i915_ggtt *ggtt, int size)  in create_ring_vma() argument
    113  struct i915_address_space *vm = &ggtt->vm;  in create_ring_vma()
    120  if (IS_ERR(obj) && i915_ggtt_has_aperture(ggtt) && !HAS_LLC(i915))  in create_ring_vma()
    174  vma = create_ring_vma(engine->gt->ggtt, size);  in intel_engine_create_ring()
intel_gt_types.h
    103  struct i915_ggtt *ggtt;  member
    322  #define intel_gt_support_legacy_fencing(gt) ((gt)->ggtt->num_fences > 0)
mock_engine.c
     38  static struct i915_vma *create_ring_vma(struct i915_ggtt *ggtt, int size)  in create_ring_vma() argument
     40  struct i915_address_space *vm = &ggtt->vm;  in create_ring_vma()
     75  ring->vma = create_ring_vma(engine->gt->ggtt, PAGE_SIZE);  in mock_ring()
/linux/drivers/gpu/drm/i915/
i915_gem.c
     62  insert_mappable_node(struct i915_ggtt *ggtt, struct drm_mm_node *node, u32 size)  in insert_mappable_node() argument
     66  err = mutex_lock_interruptible(&ggtt->vm.mutex);  in insert_mappable_node()
     71  err = drm_mm_insert_node_in_range(&ggtt->vm.mm, node,  in insert_mappable_node()
     73  0, ggtt->mappable_end,  in insert_mappable_node()
     76  mutex_unlock(&ggtt->vm.mutex);  in insert_mappable_node()
     82  remove_mappable_node(struct i915_ggtt *ggtt, struct drm_mm_node *node)  in remove_mappable_node() argument
     84  mutex_lock(&ggtt->vm.mutex);  in remove_mappable_node()
     86  mutex_unlock(&ggtt->vm.mutex);  in remove_mappable_node()
     94  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_gem_get_aperture_ioctl() local
     99  if (mutex_lock_interruptible(&ggtt->vm.mutex))  in i915_gem_get_aperture_ioctl()
    [all …]
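insert_mappable_node()/remove_mappable_node() above wrap a drm_mm range allocation that must stay below ggtt->mappable_end (the CPU-visible aperture) and must happen under the address-space mutex. The sketch below models only that contract with a toy bump allocator; the toy_* names, the 256 MiB aperture and the allocator itself are invented for illustration and do not reflect drm_mm's real behaviour.

    #include <errno.h>
    #include <pthread.h>
    #include <stdint.h>
    #include <stdio.h>

    struct toy_node {
        uint64_t start;
        uint64_t size;
    };

    struct toy_ggtt {
        pthread_mutex_t lock;        /* stands in for ggtt->vm.mutex */
        uint64_t mappable_end;       /* only [0, mappable_end) is CPU visible */
        uint64_t next_free;          /* toy replacement for drm_mm's hole search */
    };

    static int toy_insert_mappable_node(struct toy_ggtt *ggtt,
                                        struct toy_node *node, uint64_t size)
    {
        int err = 0;

        pthread_mutex_lock(&ggtt->lock);
        if (ggtt->next_free + size > ggtt->mappable_end) {
            err = -ENOSPC;           /* no room left in the aperture */
        } else {
            node->start = ggtt->next_free;
            node->size = size;
            ggtt->next_free += size;
        }
        pthread_mutex_unlock(&ggtt->lock);
        return err;
    }

    int main(void)
    {
        struct toy_ggtt ggtt = {
            .lock = PTHREAD_MUTEX_INITIALIZER,
            .mappable_end = 256ull << 20,    /* pretend 256 MiB aperture */
        };
        struct toy_node node;

        if (!toy_insert_mappable_node(&ggtt, &node, 4096))
            printf("node at %llu, size %llu\n",
                   (unsigned long long)node.start,
                   (unsigned long long)node.size);
        return 0;
    }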
i915_gem_gtt.c
     59  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in i915_gem_gtt_finish_pages() local
     62  if (unlikely(ggtt->do_idle_maps))  in i915_gem_gtt_finish_pages()
    108  GEM_BUG_ON(vm == &to_gt(vm->i915)->ggtt->alias->vm);  in i915_gem_gtt_reserve()
    208  GEM_BUG_ON(vm == &to_gt(vm->i915)->ggtt->alias->vm);  in i915_gem_gtt_insert()
i915_gem_evict.c
     48  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);  in ggtt_flush() local
     52  list_for_each_entry(gt, &ggtt->gt_list, ggtt_link) {  in ggtt_flush()
    188  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);  in i915_gem_evict_something() local
    190  list_for_each_entry(gt, &ggtt->gt_list, ggtt_link)  in i915_gem_evict_something()
    353  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);  in i915_gem_evict_for_node() local
    356  list_for_each_entry(gt, &ggtt->gt_list, ggtt_link)  in i915_gem_evict_for_node()
i915_vgpu.h
     39  int intel_vgt_balloon(struct i915_ggtt *ggtt);
     40  void intel_vgt_deballoon(struct i915_ggtt *ggtt);
i915_gpu_error.c
   1147  struct i915_ggtt *ggtt = gt->ggtt;  in i915_vma_coredump_create() local
   1148  const u64 slot = ggtt->error_capture.start;  in i915_vma_coredump_create()
   1177  if (drm_mm_node_allocated(&ggtt->error_capture)) {  in i915_vma_coredump_create()
   1182  mutex_lock(&ggtt->error_mutex);  in i915_vma_coredump_create()
   1183  if (ggtt->vm.raw_insert_page)  in i915_vma_coredump_create()
   1184  ggtt->vm.raw_insert_page(&ggtt->vm, dma, slot,  in i915_vma_coredump_create()
   1189  ggtt->vm.insert_page(&ggtt->vm, dma, slot,  in i915_vma_coredump_create()
   1195  s = io_mapping_map_wc(&ggtt->iomap, slot, PAGE_SIZE);  in i915_vma_coredump_create()
   1202  ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE);  in i915_vma_coredump_create()
   1203  mutex_unlock(&ggtt->error_mutex);  in i915_vma_coredump_create()
    [all …]
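i915_vma_coredump_create() above reuses a single reserved GGTT slot (ggtt->error_capture) to read an object back during error capture: under ggtt->error_mutex each page is bound into the slot, copied out through the CPU aperture, and the slot is cleared again. A user-space model of that one-slot-at-a-time loop follows; the toy_* names and the memcpy stand-ins for insert_page()/io_mapping_map_wc()/clear_range() are illustrative only.

    #include <pthread.h>
    #include <stdio.h>
    #include <string.h>

    #define TOY_PAGE_SIZE 4096

    struct toy_ggtt {
        pthread_mutex_t error_mutex;             /* serialises users of the slot */
        unsigned char slot[TOY_PAGE_SIZE];       /* the one reserved capture slot */
    };

    /* "Bind" one source page into the slot, copy it out, then scrub the slot. */
    static void toy_capture_page(struct toy_ggtt *ggtt,
                                 const unsigned char *src, unsigned char *dst)
    {
        pthread_mutex_lock(&ggtt->error_mutex);
        memcpy(ggtt->slot, src, TOY_PAGE_SIZE);  /* insert_page(...)            */
        memcpy(dst, ggtt->slot, TOY_PAGE_SIZE);  /* read back through the iomap */
        memset(ggtt->slot, 0, TOY_PAGE_SIZE);    /* clear_range(...)            */
        pthread_mutex_unlock(&ggtt->error_mutex);
    }

    int main(void)
    {
        static unsigned char obj[2 * TOY_PAGE_SIZE], dump[2 * TOY_PAGE_SIZE];
        struct toy_ggtt ggtt = { .error_mutex = PTHREAD_MUTEX_INITIALIZER };
        int i;

        memset(obj, 0xab, sizeof(obj));
        for (i = 0; i < 2; i++)                  /* one page at a time */
            toy_capture_page(&ggtt, obj + i * TOY_PAGE_SIZE,
                             dump + i * TOY_PAGE_SIZE);
        printf("captured %zu bytes\n", sizeof(dump));
        return 0;
    }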
/linux/drivers/gpu/drm/i915/gem/
i915_gem_stolen.c
    113  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in adjust_stolen() local
    114  struct intel_uncore *uncore = ggtt->vm.gt->uncore;  in adjust_stolen()
    138  ggtt_res = DEFINE_RES_MEM(ggtt_start, ggtt_total_entries(ggtt) * 4);  in adjust_stolen()
    597  static void dbg_poison(struct i915_ggtt *ggtt,  in dbg_poison() argument
    602  if (!drm_mm_node_allocated(&ggtt->error_capture))  in dbg_poison()
    605  if (ggtt->vm.bind_async_flags & I915_VMA_GLOBAL_BIND)  in dbg_poison()
    610  mutex_lock(&ggtt->error_mutex);  in dbg_poison()
    614  ggtt->vm.insert_page(&ggtt->vm, addr,  in dbg_poison()
    615  ggtt->error_capture.start,  in dbg_poison()
    616  i915_gem_get_pat_index(ggtt->vm.i915,  in dbg_poison()
    [all …]
i915_gem_mman.c
    347  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in vm_fault_gtt() local
    383  ret = intel_gt_reset_lock_interruptible(ggtt->vm.gt, &srcu);  in vm_fault_gtt()
    420  ret = mutex_lock_interruptible(&ggtt->vm.mutex);  in vm_fault_gtt()
    422  ret = i915_gem_evict_vm(&ggtt->vm, &ww, NULL);  in vm_fault_gtt()
    423  mutex_unlock(&ggtt->vm.mutex);  in vm_fault_gtt()
    459  set_address_limits(area, vma, obj_offset, ggtt->gmadr.start,  in vm_fault_gtt()
    463  ret = remap_io_mapping(area, start, pfn, end - start, &ggtt->iomap);  in vm_fault_gtt()
    470  mutex_lock(&to_gt(i915)->ggtt->vm.mutex);  in vm_fault_gtt()
    472  list_add(&obj->userfault_link, &to_gt(i915)->ggtt->userfault_list);  in vm_fault_gtt()
    473  mutex_unlock(&to_gt(i915)->ggtt->vm.mutex);  in vm_fault_gtt()
    [all …]
i915_gem_execbuffer.c
   1133  return to_gt(i915)->ggtt;  in cache_to_ggtt()
   1165  struct i915_ggtt *ggtt = cache_to_ggtt(cache);  in reloc_cache_remap() local
   1173  io_mapping_map_atomic_wc(&ggtt->iomap, offset);  in reloc_cache_remap()
   1194  struct i915_ggtt *ggtt = cache_to_ggtt(cache);  in reloc_cache_reset() local
   1196  intel_gt_flush_ggtt_writes(ggtt->vm.gt);  in reloc_cache_reset()
   1200  ggtt->vm.clear_range(&ggtt->vm,  in reloc_cache_reset()
   1203  mutex_lock(&ggtt->vm.mutex);  in reloc_cache_reset()
   1205  mutex_unlock(&ggtt->vm.mutex);  in reloc_cache_reset()
   1258  struct i915_ggtt *ggtt = cache_to_ggtt(cache);  in reloc_iomap() local
   1263  intel_gt_flush_ggtt_writes(ggtt->vm.gt);  in reloc_iomap()
    [all …]
/linux/drivers/gpu/drm/i915/gem/selftests/
i915_gem_client_blt.c
    371  swizzle = gt->ggtt->bit_6_swizzle_x;  in tiled_offset()
    387  swizzle = gt->ggtt->bit_6_swizzle_y;  in tiled_offset()
    693  struct i915_ggtt *ggtt = to_gt(i915)->ggtt;  in bad_swizzling() local
    698  if (has_bit17_swizzle(ggtt->bit_6_swizzle_x) ||  in bad_swizzling()
    699  has_bit17_swizzle(ggtt->bit_6_swizzle_y))  in bad_swizzling()
i915_gem_mman.c
    326  if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))  in igt_partial_tiling()
    339  (1 + next_prime_number(to_gt(i915)->ggtt->vm.total >> PAGE_SHIFT)) << PAGE_SHIFT);  in igt_partial_tiling()
    385  tile.swizzle = to_gt(i915)->ggtt->bit_6_swizzle_x;  in igt_partial_tiling()
    388  tile.swizzle = to_gt(i915)->ggtt->bit_6_swizzle_y;  in igt_partial_tiling()
    459  if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))  in igt_smoke_tiling()
    476  (1 + next_prime_number(to_gt(i915)->ggtt->vm.total >> PAGE_SHIFT)) << PAGE_SHIFT);  in igt_smoke_tiling()
    505  tile.swizzle = to_gt(i915)->ggtt->bit_6_swizzle_x;  in igt_smoke_tiling()
    508  tile.swizzle = to_gt(i915)->ggtt->bit_6_swizzle_y;  in igt_smoke_tiling()
    553  vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL);  in make_obj_busy()
    884  !i915_ggtt_has_aperture(to_gt(i915)->ggtt)  in can_mmap()
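Both selftest files above read ggtt->bit_6_swizzle_x/y because, on the older DRAM configurations these tests care about, the memory controller XORs bit 6 of a tiled address with higher address bits, so software walking the buffer with the CPU has to apply the same transform. The sketch below shows the two simplest modes as an illustration; the TOY_* names are invented, and the driver handles more swizzle variants (including the bit-17 cases) than this.

    #include <stdint.h>
    #include <stdio.h>

    enum toy_swizzle { TOY_SWIZZLE_NONE, TOY_SWIZZLE_9, TOY_SWIZZLE_9_10 };

    /* Fold the swizzle into a linear offset: flip bit 6 when the selected
     * higher bits are set, so CPU accesses land where the hardware put the
     * data. */
    static uint64_t toy_swizzle_addr(uint64_t offset, enum toy_swizzle mode)
    {
        switch (mode) {
        case TOY_SWIZZLE_9:                   /* bit 6 ^= bit 9 */
            return offset ^ ((offset >> 3) & 64);
        case TOY_SWIZZLE_9_10:                /* bit 6 ^= bit 9 ^ bit 10 */
            return offset ^ ((offset >> 3) & 64) ^ ((offset >> 4) & 64);
        default:
            return offset;
        }
    }

    int main(void)
    {
        uint64_t offset = 0x240;              /* bits 6 and 9 set */

        printf("%#llx -> %#llx\n", (unsigned long long)offset,
               (unsigned long long)toy_swizzle_addr(offset, TOY_SWIZZLE_9));
        return 0;
    }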