| /linux/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ |
| H A D | nv20.c |
    31    u32 flags, struct nvkm_fb_tile *tile)    in nv20_fb_tile_init() argument
    33    tile->addr = 0x00000001 | addr;    in nv20_fb_tile_init()
    34    tile->limit = max(1u, addr + size) - 1;    in nv20_fb_tile_init()
    35    tile->pitch = pitch;    in nv20_fb_tile_init()
    37    fb->func->tile.comp(fb, i, size, flags, tile);    in nv20_fb_tile_init()
    38    tile->addr |= 2;    in nv20_fb_tile_init()
    44    struct nvkm_fb_tile *tile)    in nv20_fb_tile_comp() argument
    48    if (!nvkm_mm_head(&fb->tags.mm, 0, 1, tags, tags, 1, &tile->tag)) {    in nv20_fb_tile_comp()
    49    if (!(flags & 2)) tile->zcomp = 0x00000000; /* Z16 */    in nv20_fb_tile_comp()
    50    else tile->zcomp = 0x04000000; /* Z24S8 */    in nv20_fb_tile_comp()
    [all …]
|
| H A D | nv30.c |
    31    u32 flags, struct nvkm_fb_tile *tile)    in nv30_fb_tile_init() argument
    35    tile->addr = (0 << 4);    in nv30_fb_tile_init()
    37    if (fb->func->tile.comp) /* z compression */    in nv30_fb_tile_init()
    38    fb->func->tile.comp(fb, i, size, flags, tile);    in nv30_fb_tile_init()
    39    tile->addr = (1 << 4);    in nv30_fb_tile_init()
    42    tile->addr |= 0x00000001; /* enable */    in nv30_fb_tile_init()
    43    tile->addr |= addr;    in nv30_fb_tile_init()
    44    tile->limit = max(1u, addr + size) - 1;    in nv30_fb_tile_init()
    45    tile->pitch = pitch;    in nv30_fb_tile_init()
    50    struct nvkm_fb_tile *tile)    in nv30_fb_tile_comp() argument
    [all …]
|
| H A D | nv10.c |
    31    u32 flags, struct nvkm_fb_tile *tile)    in nv10_fb_tile_init() argument
    33    tile->addr = 0x80000000 | addr;    in nv10_fb_tile_init()
    34    tile->limit = max(1u, addr + size) - 1;    in nv10_fb_tile_init()
    35    tile->pitch = pitch;    in nv10_fb_tile_init()
    39    nv10_fb_tile_fini(struct nvkm_fb *fb, int i, struct nvkm_fb_tile *tile)    in nv10_fb_tile_fini() argument
    41    tile->addr = 0;    in nv10_fb_tile_fini()
    42    tile->limit = 0;    in nv10_fb_tile_fini()
    43    tile->pitch = 0;    in nv10_fb_tile_fini()
    44    tile->zcomp = 0;    in nv10_fb_tile_fini()
    48    nv10_fb_tile_prog(struct nvkm_fb *fb, int i, struct nvkm_fb_tile *tile)    in nv10_fb_tile_prog() argument
    [all …]
|
| H A D | nv35.c |
    31    struct nvkm_fb_tile *tile)    in nv35_fb_tile_comp() argument
    35    if (!nvkm_mm_head(&fb->tags.mm, 0, 1, tags, tags, 1, &tile->tag)) {    in nv35_fb_tile_comp()
    36    if (flags & 2) tile->zcomp |= 0x04000000; /* Z16 */    in nv35_fb_tile_comp()
    37    else tile->zcomp |= 0x08000000; /* Z24S8 */    in nv35_fb_tile_comp()
    38    tile->zcomp |= ((tile->tag->offset ) >> 6);    in nv35_fb_tile_comp()
    39    tile->zcomp |= ((tile->tag->offset + tags - 1) >> 6) << 13;    in nv35_fb_tile_comp()
    41    tile->zcomp |= 0x40000000;    in nv35_fb_tile_comp()
    50    .tile.regions = 8,
    51    .tile.init = nv30_fb_tile_init,
    52    .tile.comp = nv35_fb_tile_comp,
    [all …]
|
| H A D | nv36.c |
    31    struct nvkm_fb_tile *tile)    in nv36_fb_tile_comp() argument
    35    if (!nvkm_mm_head(&fb->tags.mm, 0, 1, tags, tags, 1, &tile->tag)) {    in nv36_fb_tile_comp()
    36    if (flags & 2) tile->zcomp |= 0x10000000; /* Z16 */    in nv36_fb_tile_comp()
    37    else tile->zcomp |= 0x20000000; /* Z24S8 */    in nv36_fb_tile_comp()
    38    tile->zcomp |= ((tile->tag->offset ) >> 6);    in nv36_fb_tile_comp()
    39    tile->zcomp |= ((tile->tag->offset + tags - 1) >> 6) << 14;    in nv36_fb_tile_comp()
    41    tile->zcomp |= 0x80000000;    in nv36_fb_tile_comp()
    50    .tile.regions = 8,
    51    .tile.init = nv30_fb_tile_init,
    52    .tile.comp = nv36_fb_tile_comp,
    [all …]
|
| H A D | nv40.c |
    31    struct nvkm_fb_tile *tile)    in nv40_fb_tile_comp() argument
    36    !nvkm_mm_head(&fb->tags.mm, 0, 1, tags, tags, 1, &tile->tag)) {    in nv40_fb_tile_comp()
    37    tile->zcomp = 0x28000000; /* Z24S8_SPLIT_GRAD */    in nv40_fb_tile_comp()
    38    tile->zcomp |= ((tile->tag->offset ) >> 8);    in nv40_fb_tile_comp()
    39    tile->zcomp |= ((tile->tag->offset + tags - 1) >> 8) << 13;    in nv40_fb_tile_comp()
    41    tile->zcomp |= 0x40000000;    in nv40_fb_tile_comp()
    56    .tile.regions = 8,
    57    .tile.init = nv30_fb_tile_init,
    58    .tile.comp = nv40_fb_tile_comp,
    59    .tile.fini = nv20_fb_tile_fini,
    [all …]
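Across these per-generation ->tile.comp() hooks the pattern is identical: reserve `tags` compression tag lines from fb->tags.mm with nvkm_mm_head(), then pack the first and last tag-line offsets plus a format/enable bit into tile->zcomp, with shifts and bit positions that differ per chipset. A minimal sketch of the nv35-style packing, using the constants visible in the excerpt above (illustrative only, not a drop-in for the driver code):

    /* nv35-style zcomp packing; start/end correspond to
     * tile->tag->offset and tile->tag->offset + tags - 1. */
    static u32 pack_zcomp_nv35(u32 start, u32 end, bool z16)
    {
        u32 zcomp = z16 ? 0x04000000 : 0x08000000; /* Z16 vs Z24S8 */

        zcomp |= start >> 6;        /* first tag line */
        zcomp |= (end >> 6) << 13;  /* last tag line */
        zcomp |= 0x40000000;        /* enable compression */
        return zcomp;
    }

nv36 keeps the same shape but shifts the end field by 14 and uses 0x10000000/0x20000000 as format bits; nv40 has a single format (Z24S8_SPLIT_GRAD, 0x28000000) and coarser >> 8 offset granularity.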
|
| H A D | nv44.c |
    31    u32 flags, struct nvkm_fb_tile *tile)    in nv44_fb_tile_init() argument
    33    tile->addr = 0x00000001; /* mode = vram */    in nv44_fb_tile_init()
    34    tile->addr |= addr;    in nv44_fb_tile_init()
    35    tile->limit = max(1u, addr + size) - 1;    in nv44_fb_tile_init()
    36    tile->pitch = pitch;    in nv44_fb_tile_init()
    40    nv44_fb_tile_prog(struct nvkm_fb *fb, int i, struct nvkm_fb_tile *tile)    in nv44_fb_tile_prog() argument
    43    nvkm_wr32(device, 0x100604 + (i * 0x10), tile->limit);    in nv44_fb_tile_prog()
    44    nvkm_wr32(device, 0x100608 + (i * 0x10), tile->pitch);    in nv44_fb_tile_prog()
    45    nvkm_wr32(device, 0x100600 + (i * 0x10), tile->addr);    in nv44_fb_tile_prog()
    60    .tile.regions = 12,
    [all …]
|
| H A D | nv25.c |
    31    struct nvkm_fb_tile *tile)    in nv25_fb_tile_comp() argument
    35    if (!nvkm_mm_head(&fb->tags.mm, 0, 1, tags, tags, 1, &tile->tag)) {    in nv25_fb_tile_comp()
    36    if (!(flags & 2)) tile->zcomp = 0x00100000; /* Z16 */    in nv25_fb_tile_comp()
    37    else tile->zcomp = 0x00200000; /* Z24S8 */    in nv25_fb_tile_comp()
    38    tile->zcomp |= tile->tag->offset;    in nv25_fb_tile_comp()
    40    tile->zcomp |= 0x01000000;    in nv25_fb_tile_comp()
    48    .tile.regions = 8,
    49    .tile.init = nv20_fb_tile_init,
    50    .tile.comp = nv25_fb_tile_comp,
    51    .tile.fini = nv20_fb_tile_fini,
    [all …]
|
| H A D | nv46.c |
    31    u32 flags, struct nvkm_fb_tile *tile)    in nv46_fb_tile_init() argument
    34    if (!(flags & 4)) tile->addr = (0 << 3);    in nv46_fb_tile_init()
    35    else tile->addr = (1 << 3);    in nv46_fb_tile_init()
    37    tile->addr |= 0x00000001; /* mode = vram */    in nv46_fb_tile_init()
    38    tile->addr |= addr;    in nv46_fb_tile_init()
    39    tile->limit = max(1u, addr + size) - 1;    in nv46_fb_tile_init()
    40    tile->pitch = pitch;    in nv46_fb_tile_init()
    46    .tile.regions = 15,
    47    .tile.init = nv46_fb_tile_init,
    48    .tile.fini = nv20_fb_tile_fini,
    [all …]
|
| H A D | nv41.c |
    30    nv41_fb_tile_prog(struct nvkm_fb *fb, int i, struct nvkm_fb_tile *tile)    in nv41_fb_tile_prog() argument
    33    nvkm_wr32(device, 0x100604 + (i * 0x10), tile->limit);    in nv41_fb_tile_prog()
    34    nvkm_wr32(device, 0x100608 + (i * 0x10), tile->pitch);    in nv41_fb_tile_prog()
    35    nvkm_wr32(device, 0x100600 + (i * 0x10), tile->addr);    in nv41_fb_tile_prog()
    37    nvkm_wr32(device, 0x100700 + (i * 0x04), tile->zcomp);    in nv41_fb_tile_prog()
    50    .tile.regions = 12,
    51    .tile.init = nv30_fb_tile_init,
    52    .tile.comp = nv40_fb_tile_comp,
    53    .tile.fini = nv20_fb_tile_fini,
    54    .tile.prog = nv41_fb_tile_prog,
|
| H A D | base.c |
    35    nvkm_fb_tile_fini(struct nvkm_fb *fb, int region, struct nvkm_fb_tile *tile)    in nvkm_fb_tile_fini() argument
    37    fb->func->tile.fini(fb, region, tile);    in nvkm_fb_tile_fini()
    42    u32 pitch, u32 flags, struct nvkm_fb_tile *tile)    in nvkm_fb_tile_init() argument
    44    fb->func->tile.init(fb, region, addr, size, pitch, flags, tile);    in nvkm_fb_tile_init()
    48    nvkm_fb_tile_prog(struct nvkm_fb *fb, int region, struct nvkm_fb_tile *tile)    in nvkm_fb_tile_prog() argument
    51    if (fb->func->tile.prog) {    in nvkm_fb_tile_prog()
    52    fb->func->tile.prog(fb, region, tile);    in nvkm_fb_tile_prog()
    201    for (i = 0; i < fb->tile.regions; i++)    in nvkm_fb_init()
    202    fb->func->tile.prog(fb, i, &fb->tile.region[i]);    in nvkm_fb_init()
    240    for (i = 0; i < fb->tile.regions; i++)    in nvkm_fb_dtor()
    [all …]
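base.c is the dispatch layer: each nvkm_fb_tile_* wrapper routes through fb->func->tile.*, so all chipset differences stay in the per-generation files above, and nvkm_fb_init() (lines 201-202) simply replays every region after reset. A hedged sketch of driving one region through that interface; the region index and addr/size/pitch values below are placeholders, not taken from a real caller:

    /* Illustrative only: configure, program, then tear down tile
     * region 0 through the wrappers shown above. */
    static void example_tile_cycle(struct nvkm_fb *fb)
    {
        struct nvkm_fb_tile *tile = &fb->tile.region[0];

        /* fill in addr/limit/pitch (and zcomp, if tile.comp is set) */
        nvkm_fb_tile_init(fb, 0, 0x00000000, 0x00100000, 0x100, 0, tile);
        nvkm_fb_tile_prog(fb, 0, tile);  /* write the state to PFB */

        /* later: zero the region and program the cleared state back */
        nvkm_fb_tile_fini(fb, 0, tile);
        nvkm_fb_tile_prog(fb, 0, tile);
    }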
|
| H A D | nv49.c |
    33    .tile.regions = 15,
    34    .tile.init = nv30_fb_tile_init,
    35    .tile.comp = nv40_fb_tile_comp,
    36    .tile.fini = nv20_fb_tile_fini,
    37    .tile.prog = nv41_fb_tile_prog,
|
| H A D | nv47.c |
    33    .tile.regions = 15,
    34    .tile.init = nv30_fb_tile_init,
    35    .tile.comp = nv40_fb_tile_comp,
    36    .tile.fini = nv20_fb_tile_fini,
    37    .tile.prog = nv41_fb_tile_prog,
|
| H A D | nv1a.c |
    31    .tile.regions = 8,
    32    .tile.init = nv10_fb_tile_init,
    33    .tile.fini = nv10_fb_tile_fini,
    34    .tile.prog = nv10_fb_tile_prog,
|
| H A D | nv4e.c |
    32    .tile.regions = 12,
    33    .tile.init = nv46_fb_tile_init,
    34    .tile.fini = nv20_fb_tile_fini,
    35    .tile.prog = nv44_fb_tile_prog,
|
| /linux/drivers/gpu/drm/nouveau/nvkm/engine/gr/ |
| H A D | nv44.c |
    31    nv44_gr_tile(struct nvkm_gr *base, int i, struct nvkm_fb_tile *tile)    in nv44_gr_tile() argument
    44    nvkm_wr32(device, NV20_PGRAPH_TSIZE(i), tile->pitch);    in nv44_gr_tile()
    45    nvkm_wr32(device, NV20_PGRAPH_TLIMIT(i), tile->limit);    in nv44_gr_tile()
    46    nvkm_wr32(device, NV20_PGRAPH_TILE(i), tile->addr);    in nv44_gr_tile()
    53    nvkm_wr32(device, NV47_PGRAPH_TSIZE(i), tile->pitch);    in nv44_gr_tile()
    54    nvkm_wr32(device, NV47_PGRAPH_TLIMIT(i), tile->limit);    in nv44_gr_tile()
    55    nvkm_wr32(device, NV47_PGRAPH_TILE(i), tile->addr);    in nv44_gr_tile()
    56    nvkm_wr32(device, NV40_PGRAPH_TSIZE1(i), tile->pitch);    in nv44_gr_tile()
    57    nvkm_wr32(device, NV40_PGRAPH_TLIMIT1(i), tile->limit);    in nv44_gr_tile()
    58    nvkm_wr32(device, NV40_PGRAPH_TILE1(i), tile->addr);    in nv44_gr_tile()
    [all …]
|
| /linux/drivers/gpu/drm/xe/ |
| H A D | xe_ggtt.c |
    109    struct xe_tile *tile = ggtt->tile;    in ggtt_update_access_counter() local
    113    if (tile->primary_gt && XE_GT_WA(tile->primary_gt, 22019338487)) {    in ggtt_update_access_counter()
    114    affected_gt = tile->primary_gt;    in ggtt_update_access_counter()
    118    xe_tile_assert(tile, IS_DGFX(tile_to_xe(tile)));    in ggtt_update_access_counter()
    120    affected_gt = tile->media_gt;    in ggtt_update_access_counter()
    124    xe_tile_assert(tile, !IS_DGFX(tile_to_xe(tile)));    in ggtt_update_access_counter()
    142    xe_tile_assert(ggtt->tile, !(addr & XE_PTE_MASK));    in xe_ggtt_set_pte()
    143    xe_tile_assert(ggtt->tile, addr < ggtt->size);    in xe_ggtt_set_pte()
    156    xe_tile_assert(ggtt->tile, !(addr & XE_PTE_MASK));    in xe_ggtt_get_pte()
    157    xe_tile_assert(ggtt->tile, addr < ggtt->size);    in xe_ggtt_get_pte()
    [all …]
|
| H A D | xe_tile_sysfs.c |
    27    struct xe_tile *tile = arg;    in tile_sysfs_fini() local
    29    kobject_put(tile->sysfs);    in tile_sysfs_fini()
    32    int xe_tile_sysfs_init(struct xe_tile *tile)    in xe_tile_sysfs_init() argument
    34    struct xe_device *xe = tile_to_xe(tile);    in xe_tile_sysfs_init()
    44    kt->tile = tile;    in xe_tile_sysfs_init()
    46    err = kobject_add(&kt->base, &dev->kobj, "tile%d", tile->id);    in xe_tile_sysfs_init()
    50    tile->sysfs = &kt->base;    in xe_tile_sysfs_init()
    52    err = xe_vram_freq_sysfs_init(tile);    in xe_tile_sysfs_init()
    56    return devm_add_action_or_reset(xe->drm.dev, tile_sysfs_fini, tile);    in xe_tile_sysfs_init()
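The shape of xe_tile_sysfs_init() is a common kernel idiom: embed a kobject in a wrapper struct, kobject_add() it under the device, and register a device-managed action so the final kobject_put() runs automatically on driver teardown. A generic, self-contained sketch of that idiom (every "my_*" name is invented for illustration; only the kobject/devm calls are real kernel API):

    #include <linux/device.h>
    #include <linux/kobject.h>
    #include <linux/slab.h>

    struct my_kobj {
        struct kobject base;
    };

    static void my_kobj_release(struct kobject *kobj)
    {
        kfree(container_of(kobj, struct my_kobj, base));
    }

    static const struct kobj_type my_ktype = {
        .release = my_kobj_release,
        .sysfs_ops = &kobj_sysfs_ops,
    };

    static void my_kobj_fini(void *arg)
    {
        struct my_kobj *k = arg;

        kobject_put(&k->base); /* drops the ref; release() frees */
    }

    static int my_kobj_init(struct device *dev, struct kobject *parent)
    {
        struct my_kobj *k = kzalloc(sizeof(*k), GFP_KERNEL);
        int err;

        if (!k)
            return -ENOMEM;

        kobject_init(&k->base, &my_ktype);
        err = kobject_add(&k->base, parent, "example%d", 0);
        if (err) {
            kobject_put(&k->base); /* release() still frees k */
            return err;
        }

        /* ensures my_kobj_fini() runs even if a later probe step fails */
        return devm_add_action_or_reset(dev, my_kobj_fini, k);
    }

The error path matters: once kobject_init() has run, the struct must be released via kobject_put(), never a bare kfree() — which is why tile_sysfs_fini() above is a kobject_put().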
|
| H A D | xe_pt.c |
    59    static u64 __xe_pt_empty_pte(struct xe_tile *tile, struct xe_vm *vm,    in __xe_pt_empty_pte() argument
    62    struct xe_device *xe = tile_to_xe(tile);    in __xe_pt_empty_pte()
    64    u8 id = tile->id;    in __xe_pt_empty_pte()
    101    struct xe_pt *xe_pt_create(struct xe_vm *vm, struct xe_tile *tile,    in xe_pt_create() argument
    119    bo_flags = XE_BO_FLAG_VRAM_IF_DGFX(tile) |    in xe_pt_create()
    128    bo = xe_bo_create_pin_map(vm->xe, tile, vm, SZ_4K,    in xe_pt_create()
    141    xe_tile_assert(tile, level <= XE_VM_MAX_LEVEL);    in xe_pt_create()
    161    void xe_pt_populate_empty(struct xe_tile *tile, struct xe_vm *vm,    in xe_pt_populate_empty() argument
    175    empty = __xe_pt_empty_pte(tile, vm, pt->level);    in xe_pt_populate_empty()
    288    struct xe_tile *tile;    member
    [all …]
|
| H A D | xe_bo_evict.c |
    162    struct xe_tile *tile;    in xe_bo_evict_all() local
    181    for_each_tile(tile, xe, id)    in xe_bo_evict_all()
    182    xe_tile_migrate_wait(tile);    in xe_bo_evict_all()
    201    struct xe_tile *tile;    in xe_bo_restore_and_map_ggtt() local
    204    for_each_tile(tile, xe_bo_device(bo), id) {    in xe_bo_restore_and_map_ggtt()
    205    if (tile != bo->tile && !(bo->flags & XE_BO_FLAG_GGTTx(tile)))    in xe_bo_restore_and_map_ggtt()
    208    xe_ggtt_map_bo_unlocked(tile->mem.ggtt, bo);    in xe_bo_restore_and_map_ggtt()
    245    struct xe_tile *tile;    in xe_bo_restore_late() local
    252    for_each_tile(tile, xe, id)    in xe_bo_restore_late()
    253    xe_tile_migrate_wait(tile);    in xe_bo_restore_late()
    [all …]
|
| H A D | xe_pcode.h |
    15    void xe_pcode_init(struct xe_tile *tile);
    18    int xe_pcode_init_min_freq_table(struct xe_tile *tile, u32 min_gt_freq,
    20    int xe_pcode_read(struct xe_tile *tile, u32 mbox, u32 *val, u32 *val1);
    21    int xe_pcode_write_timeout(struct xe_tile *tile, u32 mbox, u32 val,
    23    int xe_pcode_write64_timeout(struct xe_tile *tile, u32 mbox, u32 data0,
    26    #define xe_pcode_write(tile, mbox, val) \    argument
    27    xe_pcode_write_timeout(tile, mbox, val, 1)
    29    int xe_pcode_request(struct xe_tile *tile, u32 mbox, u32 request,
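A hedged usage sketch for this header's read/write pair. EXAMPLE_MBOX is a made-up placeholder, not a real pcode mailbox command; per the macro above, xe_pcode_write() is just xe_pcode_write_timeout() with a default timeout of 1:

    #define EXAMPLE_MBOX 0x0 /* hypothetical mailbox id */

    static int example_pcode_roundtrip(struct xe_tile *tile)
    {
        u32 val = 0, val1 = 0;
        int err;

        err = xe_pcode_read(tile, EXAMPLE_MBOX, &val, &val1);
        if (err)
            return err;

        /* expands to xe_pcode_write_timeout(tile, EXAMPLE_MBOX, val, 1) */
        return xe_pcode_write(tile, EXAMPLE_MBOX, val);
    }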
|
| H A D | xe_vram.c |
    221    static int tile_vram_size(struct xe_tile *tile, u64 *vram_size,    in tile_vram_size() argument
    224    struct xe_device *xe = tile_to_xe(tile);    in tile_vram_size()
    225    struct xe_gt *gt = tile->primary_gt;    in tile_vram_size()
    235    for_each_if(t->id < tile->id)    in tile_vram_size()
    238    *tile_size = xe_tile_sriov_vf_lmem(tile);    in tile_vram_size()
    250    reg = xe_mmio_read32(&tile->mmio, SG_TILE_ADDR_RANGE(tile->id));    in tile_vram_size()
    262    offset = xe_mmio_read64_2x32(&tile->mmio, GSMBASE);    in tile_vram_size()
    274    struct xe_tile *tile;    in vram_fini() local
    279    for_each_tile(tile, xe, id) {    in vram_fini()
    280    tile->mem.vram->mapping = NULL;    in vram_fini()
    [all …]
|
| H A D | xe_migrate.c |
    49    /** @tile: Backpointer to the tile this struct xe_migrate belongs to. */
    50    struct xe_tile *tile;    member
    184    static int xe_migrate_prepare_vm(struct xe_tile *tile, struct xe_migrate *m,    in xe_migrate_prepare_vm() argument
    187    struct xe_device *xe = tile_to_xe(tile);    in xe_migrate_prepare_vm()
    189    u8 id = tile->id;    in xe_migrate_prepare_vm()
    195    struct xe_bo *bo, *batch = tile->mem.kernel_bb_pool->bo;    in xe_migrate_prepare_vm()
    206    xe_tile_assert(tile, m->batch_base_ofs + xe_bo_size(batch) < SZ_2M);    in xe_migrate_prepare_vm()
    208    bo = xe_bo_create_pin_map(vm->xe, tile, vm,    in xe_migrate_prepare_vm()
    211    XE_BO_FLAG_VRAM_IF_DGFX(tile) |    in xe_migrate_prepare_vm()
    402    xe_migrate_alloc(struct xe_tile *tile)    in xe_migrate_alloc() argument
    411    xe_migrate_lock_prepare_vm(struct xe_tile *tile, struct xe_migrate *m, struct xe_vm *vm)    in xe_migrate_lock_prepare_vm() argument
    437    struct xe_tile *tile = m->tile;    in xe_migrate_init() local
    1093    xe_migrate_ccs_rw_copy(struct xe_tile *tile, struct xe_exec_queue *q, struct xe_bo *src_bo, enum xe_sriov_vf_ccs_rw_ctxs read_write)    in xe_migrate_ccs_rw_copy() argument
    1236    struct xe_tile *tile = vram_bo->tile;    in xe_migrate_vram_copy_chunk() local
    1591    write_pgtable(struct xe_tile *tile, struct xe_bb *bb, u64 ppgtt_ofs, const struct xe_vm_pgtable_update_op *pt_op, const struct xe_vm_pgtable_update *update, struct xe_migrate_pt_update *pt_update)    in write_pgtable() argument
    1711    struct xe_tile *tile = m->tile;    in __xe_migrate_update_pgtables() local
    2291    struct xe_tile *tile = m->tile;    in xe_migrate_access_memory() local
    [all …]
| H A D | xe_svm.c |
    122    struct xe_tile *tile;    in xe_svm_range_notifier_event_begin() local
    145    for_each_tile(tile, xe, id)    in xe_svm_range_notifier_event_begin()
    146    if (xe_pt_zap_ptes_range(tile, vm, range)) {    in xe_svm_range_notifier_event_begin()
    155    xe_svm_tlb_inval_count_stats_incr(tile->primary_gt);    in xe_svm_range_notifier_event_begin()
    156    if (tile->media_gt)    in xe_svm_range_notifier_event_begin()
    157    xe_svm_tlb_inval_count_stats_incr(tile->media_gt);    in xe_svm_range_notifier_event_begin()
    204    struct xe_tile *tile;    in xe_svm_invalidate() local
    260    for_each_tile(tile, xe, id) {    in xe_svm_invalidate()
    262    xe_svm_tlb_inval_us_stats_incr(tile->primary_gt, start);    in xe_svm_invalidate()
    263    if (tile    in xe_svm_invalidate()
    786    xe_svm_range_is_valid(struct xe_svm_range *range, struct xe_tile *tile, bool devmem_only)    in xe_svm_range_is_valid() argument
    1025    struct xe_tile *tile = gt_to_tile(gt);    local
    1336    struct xe_tile *tile;    in xe_svm_ranges_zap_ptes_in_range() local
    1372    tile_local_pagemap(struct xe_tile *tile)    in tile_local_pagemap() argument
    1394    xe_vma_resolve_pagemap(struct xe_vma *vma, struct xe_tile *tile)    in xe_vma_resolve_pagemap() argument
    1417    xe_svm_alloc_vram(struct xe_tile *tile, struct xe_svm_range *range, const struct drm_gpusvm_ctx *ctx)    in xe_svm_alloc_vram() argument
    1469    xe_devm_add(struct xe_tile *tile, struct xe_vram_region *vr)    in xe_devm_add() argument
    1509    xe_svm_alloc_vram(struct xe_tile *tile, struct xe_svm_range *range, const struct drm_gpusvm_ctx *ctx)    in xe_svm_alloc_vram() argument
    1516    xe_devm_add(struct xe_tile *tile, struct xe_vram_region *vr)    in xe_devm_add() argument
    1521    xe_vma_resolve_pagemap(struct xe_vma *vma, struct xe_tile *tile)    in xe_vma_resolve_pagemap() argument
    [all …]
| /linux/drivers/gpu/drm/xe/tests/ |
| H A D | xe_bo.c |
    24    static int ccs_test_migrate(struct xe_tile *tile, struct xe_bo *bo,    in ccs_test_migrate() argument
    46    fence = xe_migrate_clear(tile->migrate, bo, bo->ttm.resource,    in ccs_test_migrate()
    109    offset = xe_device_ccs_bytes(tile_to_xe(tile), xe_bo_size(bo));    in ccs_test_migrate()
    126    static void ccs_test_run_tile(struct xe_device *xe, struct xe_tile *tile,    in ccs_test_run_tile() argument
    134    unsigned int bo_flags = XE_BO_FLAG_VRAM_IF_DGFX(tile);    in ccs_test_run_tile()
    138    kunit_info(test, "Testing vram id %u\n", tile->id);    in ccs_test_run_tile()
    152    ret = ccs_test_migrate(tile, bo, false, 0ULL, 0xdeadbeefdeadbeefULL,    in ccs_test_run_tile()
    158    ret = ccs_test_migrate(tile, bo, false, 0xdeadbeefdeadbeefULL,    in ccs_test_run_tile()
    164    ret = ccs_test_migrate(tile, bo, true, 0ULL, 0ULL, test, exec);    in ccs_test_run_tile()
    174    struct xe_tile *tile;    in ccs_test_run_device() local
    [all …]
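For orientation, the excerpt follows the standard KUnit shape: test functions take a struct kunit *, report progress via kunit_info(), and are registered through a case table and suite. A stripped-down sketch of that scaffolding (suite and case names invented; unrelated to the xe test logic above):

    #include <kunit/test.h>

    static void example_test(struct kunit *test)
    {
        kunit_info(test, "running example case\n");
        KUNIT_EXPECT_EQ(test, 2 + 2, 4);
    }

    static struct kunit_case example_cases[] = {
        KUNIT_CASE(example_test),
        {}
    };

    static struct kunit_suite example_suite = {
        .name = "example",
        .test_cases = example_cases,
    };
    kunit_test_suite(example_suite);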
|