/linux/drivers/gpu/drm/xe/
xe_ggtt.c
    107  static void ggtt_update_access_counter(struct xe_ggtt *ggtt)   [in ggtt_update_access_counter(), argument]
    109  struct xe_tile *tile = ggtt->tile;   [in ggtt_update_access_counter()]
    132  lockdep_assert_held(&ggtt->lock);   [in ggtt_update_access_counter()]
    134  if ((++ggtt->access_count % max_gtt_writes) == 0) {   [in ggtt_update_access_counter()]
    136  ggtt->access_count = 0;   [in ggtt_update_access_counter()]
    140  static void xe_ggtt_set_pte(struct xe_ggtt *ggtt, u64 addr, u64 pte)   [in xe_ggtt_set_pte(), argument]
    142  xe_tile_assert(ggtt->tile, !(addr & XE_PTE_MASK));   [in xe_ggtt_set_pte()]
    143  xe_tile_assert(ggtt->tile, addr < ggtt->size);   [in xe_ggtt_set_pte()]
    145  writeq(pte, &ggtt->gsm[addr >> XE_PTE_SHIFT]);   [in xe_ggtt_set_pte()]
    148  static void xe_ggtt_set_pte_and_flush(struct xe_ggtt *ggtt, u64 addr, u64 pte)   [in xe_ggtt_set_pte_and_flush(), argument]
    [all …]
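Taken together, these hits show Xe's lowest-level GGTT update path: each PTE is a single writeq() into the io-mapped GSM array, indexed by GGTT offset, and a per-GGTT access counter forces a flush once every max_gtt_writes updates. Below is a standalone userspace sketch of that shape; the struct layout, the MAX_GTT_WRITES value, and the fake_* names are illustrative stand-ins, not the driver's own definitions.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define PTE_SHIFT      12                        /* assume 4 KiB pages */
#define PTE_MASK       ((1ull << PTE_SHIFT) - 1)
#define MAX_GTT_WRITES 64                        /* illustrative threshold */

struct fake_ggtt {
	uint64_t *gsm;             /* stands in for the io-mapped GSM */
	uint64_t size;             /* GGTT size in bytes */
	unsigned int access_count;
};

/* Mirrors ggtt_update_access_counter(): flush once every
 * MAX_GTT_WRITES PTE updates, then restart the count. */
static void fake_ggtt_update_access_counter(struct fake_ggtt *ggtt)
{
	if ((++ggtt->access_count % MAX_GTT_WRITES) == 0) {
		printf("flushing after %u writes\n", ggtt->access_count);
		ggtt->access_count = 0;
	}
}

static void fake_ggtt_set_pte(struct fake_ggtt *ggtt, uint64_t addr, uint64_t pte)
{
	assert(!(addr & PTE_MASK));          /* offset must be page aligned */
	assert(addr < ggtt->size);
	ggtt->gsm[addr >> PTE_SHIFT] = pte;  /* plain store in place of writeq() */
}

static void fake_ggtt_set_pte_and_flush(struct fake_ggtt *ggtt,
					uint64_t addr, uint64_t pte)
{
	fake_ggtt_set_pte(ggtt, addr, pte);
	fake_ggtt_update_access_counter(ggtt);
}

int main(void)
{
	struct fake_ggtt ggtt = { .size = 1ull << 20 };  /* 1 MiB: 256 PTEs */

	ggtt.gsm = calloc(ggtt.size >> PTE_SHIFT, sizeof(*ggtt.gsm));
	for (uint64_t addr = 0; addr < ggtt.size; addr += 1ull << PTE_SHIFT)
		fake_ggtt_set_pte_and_flush(&ggtt, addr, addr | 1 /* valid */);
	free(ggtt.gsm);
	return 0;
}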
|
xe_sched_job_types.h
    65   bool ggtt;   [member]
|
xe_gt_sriov_pf_config.c
    348  u64 ggtt_size = gt_to_tile(gt)->mem.ggtt->size - ggtt_start;   [in pf_push_full_vf_config()]
    463  struct xe_ggtt *ggtt = tile->mem.ggtt;   [in pf_provision_vf_ggtt(), local]
    489  node = xe_ggtt_node_init(ggtt);   [in pf_provision_vf_ggtt()]
    668  struct xe_ggtt *ggtt = gt_to_tile(gt)->mem.ggtt;   [in pf_get_max_ggtt(), local]
    673  max_hole = xe_ggtt_largest_hole(ggtt, alignment, &spare);   [in pf_get_max_ggtt()]
    2908 struct xe_ggtt *ggtt = gt_to_tile(gt)->mem.ggtt;   [in xe_gt_sriov_pf_config_print_available_ggtt(), local]
    2918 total = xe_ggtt_print_holes(ggtt, alignment, p);   [in xe_gt_sriov_pf_config_print_available_ggtt()]
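pf_get_max_ggtt() sizes VF provisioning by asking the GGTT allocator for its largest remaining hole (xe_ggtt_largest_hole()). A hedged sketch of what such a query computes, over a plain sorted array of allocated ranges rather than the driver's real allocator state, with all names invented:

#include <stdint.h>
#include <stdio.h>

struct range { uint64_t start, end; };   /* [start, end), sorted, disjoint */

static uint64_t align_up(uint64_t x, uint64_t a)
{
	return (x + a - 1) & ~(a - 1);   /* a must be a power of two */
}

/* Walk the gaps between allocated ranges and report the biggest one
 * that still holds an aligned allocation. */
static uint64_t largest_hole(uint64_t space_start, uint64_t space_end,
			     const struct range *used, int n, uint64_t align)
{
	uint64_t cursor = space_start, best = 0;

	for (int i = 0; i <= n; i++) {
		uint64_t hole_end = (i < n) ? used[i].start : space_end;
		uint64_t hole_start = align_up(cursor, align);

		if (hole_end > hole_start && hole_end - hole_start > best)
			best = hole_end - hole_start;
		if (i < n)
			cursor = used[i].end;
	}
	return best;
}

int main(void)
{
	const struct range used[] = {
		{ 0x0000, 0x4000 },   /* e.g. a PF-owned head of the space */
		{ 0x9000, 0xa000 },
	};

	printf("largest hole: %#llx\n",
	       (unsigned long long)largest_hole(0, 0x10000, used, 2, 0x1000));
	return 0;
}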
|
xe_bo_evict.c
    208  xe_ggtt_map_bo_unlocked(tile->mem.ggtt, bo);   [in xe_bo_restore_and_map_ggtt()]
|
/linux/drivers/gpu/drm/i915/selftests/
i915_gem_evict.c
    48   static int populate_ggtt(struct i915_ggtt *ggtt, struct list_head *objects)   [in populate_ggtt(), argument]
    57   obj = i915_gem_object_create_internal(ggtt->vm.i915,   [in populate_ggtt()]
    75   count, ggtt->vm.total / PAGE_SIZE);   [in populate_ggtt()]
    77   if (list_empty(&ggtt->vm.bound_list)) {   [in populate_ggtt()]
    85   static void unpin_ggtt(struct i915_ggtt *ggtt)   [in unpin_ggtt(), argument]
    89   list_for_each_entry(vma, &ggtt->vm.bound_list, vm_link)   [in unpin_ggtt()]
    94   static void cleanup_objects(struct i915_ggtt *ggtt, struct list_head *list)   [in cleanup_objects(), argument]
    104  i915_gem_drain_freed_objects(ggtt->vm.i915);   [in cleanup_objects()]
    110  struct i915_ggtt *ggtt = gt->ggtt;   [in igt_evict_something(), local]
    116  err = populate_ggtt(ggtt, &objects);   [in igt_evict_something()]
    [all …]
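The selftest shape here is: fill the GGTT with fixed-size objects until allocation fails, keep them on a list, sanity-check the count, then tear everything down. A minimal userspace rendering of that loop, with invented names and a hand-rolled list in place of struct list_head:

#include <stdio.h>
#include <stdlib.h>

#define SPACE_TOTAL (16 * 4096)   /* pretend address space: 16 pages */
#define OBJ_SIZE    4096

struct object {
	struct object *next;
	unsigned long offset;
};

/* Fill the space with back-to-back objects until it runs out, the way
 * populate_ggtt() loops until the GGTT reports -ENOSPC. */
static struct object *populate(unsigned long *filled)
{
	struct object *head = NULL;
	unsigned long offset = 0;

	while (offset + OBJ_SIZE <= SPACE_TOTAL) {
		struct object *obj = malloc(sizeof(*obj));

		obj->offset = offset;
		obj->next = head;
		head = obj;
		offset += OBJ_SIZE;
	}
	*filled = offset;
	return head;
}

/* cleanup_objects() analogue: walk the list and release everything. */
static void cleanup(struct object *head)
{
	while (head) {
		struct object *next = head->next;

		free(head);
		head = next;
	}
}

int main(void)
{
	unsigned long filled;
	struct object *list = populate(&filled);
	int ret = 0;

	if (filled != SPACE_TOTAL) {   /* the selftest's count check */
		fprintf(stderr, "only filled %lu of %d bytes\n",
			filled, SPACE_TOTAL);
		ret = 1;
	}
	cleanup(list);
	return ret;
}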
|
mock_gtt.c
    109  struct i915_ggtt *ggtt = gt->ggtt;   [in mock_init_ggtt(), local]
    111  ggtt->vm.gt = gt;   [in mock_init_ggtt()]
    112  ggtt->vm.i915 = gt->i915;   [in mock_init_ggtt()]
    113  ggtt->vm.is_ggtt = true;   [in mock_init_ggtt()]
    115  ggtt->gmadr = DEFINE_RES_MEM(0, 2048 * PAGE_SIZE);   [in mock_init_ggtt()]
    116  ggtt->mappable_end = resource_size(&ggtt->gmadr);   [in mock_init_ggtt()]
    117  ggtt->vm.total = 4096 * PAGE_SIZE;   [in mock_init_ggtt()]
    119  ggtt->vm.alloc_pt_dma = alloc_pt_dma;   [in mock_init_ggtt()]
    120  ggtt->vm.alloc_scratch_dma = alloc_pt_dma;   [in mock_init_ggtt()]
    122  ggtt->vm.clear_range = mock_clear_range;   [in mock_init_ggtt()]
    [all …]
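mock_init_ggtt() builds a GGTT the rest of i915 will accept by filling in the same fields and ops the real init would, but pointing the ops at no-op mocks. The pattern, reduced to a sketch with invented struct layout and names:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct fake_vm {
	bool is_ggtt;
	uint64_t total;
	void (*clear_range)(struct fake_vm *vm, uint64_t start, uint64_t length);
};

/* A real GGTT would scrub PTEs here; the mock just records the call. */
static void mock_clear_range(struct fake_vm *vm, uint64_t start, uint64_t length)
{
	(void)vm;
	printf("clear_range(%#llx, %#llx)\n",
	       (unsigned long long)start, (unsigned long long)length);
}

/* Wire up the same slots real init would, but with mock backends. */
static void mock_init_vm(struct fake_vm *vm)
{
	vm->is_ggtt = true;
	vm->total = 4096ull * 4096;    /* arbitrary test-sized space */
	vm->clear_range = mock_clear_range;
}

int main(void)
{
	struct fake_vm vm;

	mock_init_vm(&vm);
	vm.clear_range(&vm, 0, vm.total);   /* callers never know it's a mock */
	return 0;
}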
|
mock_gtt.h
    33   void mock_fini_ggtt(struct i915_ggtt *ggtt);
|
/linux/drivers/gpu/drm/i915/gt/
selftest_reset.c
    22   struct i915_ggtt *ggtt = gt->ggtt;   [in __igt_reset_stolen(), local]
    34   if (!drm_mm_node_allocated(&ggtt->error_capture))   [in __igt_reset_stolen()]
    87   ggtt->vm.insert_page(&ggtt->vm, dma,   [in __igt_reset_stolen()]
    88   ggtt->error_capture.start,   [in __igt_reset_stolen()]
    94   s = io_mapping_map_wc(&ggtt->iomap,   [in __igt_reset_stolen()]
    95   ggtt->error_capture.start,   [in __igt_reset_stolen()]
    111  ggtt->vm.clear_range(&ggtt->vm, ggtt->error_capture.start, PAGE_SIZE);   [in __igt_reset_stolen()]
    130  ggtt->vm.insert_page(&ggtt->vm, dma,   [in __igt_reset_stolen()]
    131  ggtt->error_capture.start,   [in __igt_reset_stolen()]
    137  s = io_mapping_map_wc(&ggtt->iomap,   [in __igt_reset_stolen()]
    [all …]
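This selftest reuses i915's single reserved error-capture slot as a movable window: insert_page() binds one page at the fixed GGTT offset, io_mapping_map_wc() maps that offset, the test reads through it, and clear_range() unbinds it before the next page. A sketch of the rebinding idea, with plain pointers standing in for GGTT binds and all names illustrative:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define PAGE_SIZE 4096

static const uint8_t *window;   /* stands in for the mapped GGTT slot */

/* vm.insert_page() analogue: bind one page at the fixed slot. */
static void insert_page(const uint8_t *page)
{
	window = page;
}

/* vm.clear_range() analogue: unbind the slot again. */
static void clear_range(void)
{
	window = NULL;
}

int main(void)
{
	uint8_t pages[3][PAGE_SIZE];
	unsigned int sum = 0;

	memset(pages, 0xa5, sizeof(pages));

	/* One page at a time through the same window, as the selftest does. */
	for (int i = 0; i < 3; i++) {
		insert_page(pages[i]);
		sum += window[0];   /* read through the WC mapping */
		clear_range();
	}
	printf("checksum over 3 pages read via one window: %u\n", sum);
	return 0;
}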
|
intel_ggtt_fencing.h
    24   struct i915_ggtt *ggtt;   [member]
    44   struct i915_fence_reg *i915_reserve_fence(struct i915_ggtt *ggtt);
    47   void intel_ggtt_restore_fences(struct i915_ggtt *ggtt);
    54   void intel_ggtt_init_fences(struct i915_ggtt *ggtt);
    55   void intel_ggtt_fini_fences(struct i915_ggtt *ggtt);
|
gen6_ppgtt.c
    166  gen6_ggtt_invalidate(ppgtt->base.vm.gt->ggtt);   [in gen6_flush_pd()]
    286  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);   [in pd_vma_bind(), local]
    291  ppgtt->pd_addr = (gen6_pte_t __iomem *)ggtt->gsm + ggtt_offset;   [in pd_vma_bind()]
    382  struct i915_ggtt * const ggtt = ppgtt->base.vm.gt->ggtt;   [in gen6_alloc_top_pd(), local]
    402  ppgtt->vma = i915_vma_instance(pd->pt.base, &ggtt->vm, NULL);   [in gen6_alloc_top_pd()]
    430  struct i915_ggtt * const ggtt = gt->ggtt;   [in gen6_ppgtt_create(), local]
    452  ppgtt->base.vm.pte_encode = ggtt->vm.pte_encode;   [in gen6_ppgtt_create()]
|
intel_ggtt_gmch.h
    16   int intel_ggtt_gmch_probe(struct i915_ggtt *ggtt);
    23   static inline int intel_ggtt_gmch_probe(struct i915_ggtt *ggtt) { return -ENODEV; }   [in intel_ggtt_gmch_probe(), argument]
|
intel_ring.c
    111  static struct i915_vma *create_ring_vma(struct i915_ggtt *ggtt, int size)   [in create_ring_vma(), argument]
    113  struct i915_address_space *vm = &ggtt->vm;   [in create_ring_vma()]
    120  if (IS_ERR(obj) && i915_ggtt_has_aperture(ggtt) && !HAS_LLC(i915))   [in create_ring_vma()]
    174  vma = create_ring_vma(engine->gt->ggtt, size);   [in intel_engine_create_ring()]
|
intel_gt_types.h
    103  struct i915_ggtt *ggtt;   [member]
    322  #define intel_gt_support_legacy_fencing(gt) ((gt)->ggtt->num_fences > 0)
|
mock_engine.c
    38   static struct i915_vma *create_ring_vma(struct i915_ggtt *ggtt, int size)   [in create_ring_vma(), argument]
    40   struct i915_address_space *vm = &ggtt->vm;   [in create_ring_vma()]
    75   ring->vma = create_ring_vma(engine->gt->ggtt, PAGE_SIZE);   [in mock_ring()]
|
selftest_mocs.c
    80   __vm_create_scratch_for_read_pinned(&gt->ggtt->vm, PAGE_SIZE);   [in live_mocs_init()]
|
selftest_workarounds.c
    125  vma = i915_vma_instance(result, &engine->gt->ggtt->vm, NULL);   [in read_nonprivs()]
|
/linux/drivers/gpu/drm/i915/
i915_gem_gtt.c
    59   struct i915_ggtt *ggtt = to_gt(i915)->ggtt;   [in i915_gem_gtt_finish_pages(), local]
    62   if (unlikely(ggtt->do_idle_maps))   [in i915_gem_gtt_finish_pages()]
    108  GEM_BUG_ON(vm == &to_gt(vm->i915)->ggtt->alias->vm);   [in i915_gem_gtt_reserve()]
    208  GEM_BUG_ON(vm == &to_gt(vm->i915)->ggtt->alias->vm);   [in i915_gem_gtt_insert()]
|
i915_gem_evict.c
    48   struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);   [in ggtt_flush(), local]
    52   list_for_each_entry(gt, &ggtt->gt_list, ggtt_link) {   [in ggtt_flush()]
    188  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);   [in i915_gem_evict_something(), local]
    190  list_for_each_entry(gt, &ggtt->gt_list, ggtt_link)   [in i915_gem_evict_something()]
    353  struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm);   [in i915_gem_evict_for_node(), local]
    356  list_for_each_entry(gt, &ggtt->gt_list, ggtt_link)   [in i915_gem_evict_for_node()]
|
i915_vgpu.h
    39   int intel_vgt_balloon(struct i915_ggtt *ggtt);
    40   void intel_vgt_deballoon(struct i915_ggtt *ggtt);
|
/linux/drivers/gpu/drm/xe/tests/
xe_guc_buf_kunit.c
    39   struct xe_ggtt *ggtt = tile->mem.ggtt;   [in replacement_xe_managed_bo_create_pin_map(), local]
    41   bo->ggtt_node[tile->id] = xe_ggtt_node_init(ggtt);   [in replacement_xe_managed_bo_create_pin_map()]
    59   struct xe_ggtt *ggtt;   [in guc_buf_test_init(), local]
    65   ggtt = xe_device_get_root_tile(test->priv)->mem.ggtt;   [in guc_buf_test_init()]
    69   xe_ggtt_init_kunit(ggtt, DUT_GGTT_START,   [in guc_buf_test_init()]
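The KUnit test swaps in replacement_xe_managed_bo_create_pin_map() so buffer objects are carved out of a test-initialized GGTT instead of touching real hardware. The underlying test-double pattern is a creation hook the test rebinds; a minimal sketch with invented names (the real test uses KUnit's stub machinery rather than a bare function pointer):

#include <stdio.h>

struct buf { int id; };

static struct buf real_create(void)
{
	return (struct buf){ .id = 1 };    /* would touch hardware */
}

static struct buf fake_create(void)
{
	return (struct buf){ .id = 42 };   /* test double: no hardware needed */
}

/* The hook production code calls; it never knows which backend it got. */
static struct buf (*create_pin_map)(void) = real_create;

int main(void)
{
	create_pin_map = fake_create;      /* test init rebinds the hook */
	printf("created bo id=%d\n", create_pin_map().id);
	return 0;
}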
|
/linux/drivers/gpu/drm/i915/gem/selftests/
i915_gem_object.c
    46   to_gt(i915)->ggtt->vm.total + PAGE_SIZE);   [in igt_gem_huge()]
|
i915_gem_coherency.c
    264  return gt->ggtt->num_fences;   [in needs_fence_registers()]
|
/linux/drivers/gpu/drm/i915/gvt/
kvmgt.c
    784  aperture_va = io_mapping_map_wc(&vgpu->gvt->gt->ggtt->iomap,   [in intel_vgpu_aperture_rw()]
|