/linux/drivers/gpu/drm/xe/
xe_ggtt.c
    556  xe_map_memset(xe, &ggtt->scratch->vmap, 0, 0, xe_bo_size(ggtt->scratch));  in xe_ggtt_init()
    697  end = start + xe_bo_size(bo);  in xe_ggtt_map_bo()
    702  for (xe_res_first_sg(xe_bo_sg(bo), 0, xe_bo_size(bo), &cur);  in xe_ggtt_map_bo()
    710  for (xe_res_first(bo->ttm.resource, 0, xe_bo_size(bo), &cur);  in xe_ggtt_map_bo()
    799  xe_tile_assert(ggtt->tile, bo->ggtt_node[tile_id]->base.size == xe_bo_size(bo));  in __xe_ggtt_insert_bo_at()
    835  xe_tile_assert(ggtt->tile, end >= start + xe_bo_size(bo));  in __xe_ggtt_insert_bo_at()
    838  xe_bo_size(bo), alignment, 0, start, end, 0);  in __xe_ggtt_insert_bo_at()
    903  xe_tile_assert(ggtt->tile, bo->ggtt_node[tile_id]->base.size == xe_bo_size(bo));  in xe_ggtt_remove_bo()

xe_migrate.c
    202  xe_tile_assert(tile, m->batch_base_ofs + xe_bo_size(batch) < SZ_2M);  in xe_migrate_pt_bo_alloc()
    231  pt29_ofs = xe_bo_size(bo) - 3 * XE_PAGE_SIZE;  in xe_migrate_prepare_vm()
    253  for (i = 0; i < xe_bo_size(batch);  in xe_migrate_prepare_vm()
    264  xe_tile_assert(tile, xe_bo_size(batch) == SZ_1M);  in xe_migrate_prepare_vm()
    268  xe_tile_assert(tile, xe_bo_size(batch) == SZ_512K);  in xe_migrate_prepare_vm()
    270  for (i = 0; i < xe_bo_size(batch);  in xe_migrate_prepare_vm()
    328  u64 pt30_ofs = xe_bo_size(bo) - 2 * XE_PAGE_SIZE;  in xe_migrate_prepare_vm()
    343  u64 pt31_ofs = xe_bo_size(bo) - XE_PAGE_SIZE;  in xe_migrate_prepare_vm()
    875  u64 size = xe_bo_size(src_bo);  in __xe_migrate_copy()
    905  if (src_bo != dst_bo && XE_WARN_ON(xe_bo_size(src_bo) != xe_bo_size(dst_bo)))  in __xe_migrate_copy()
    [all …]

xe_bo.h
    389  static inline size_t xe_bo_size(struct xe_bo *bo)  in xe_bo_size() function
    404  XE_WARN_ON(offset + xe_bo_size(bo) > (1ull << 32));  in __xe_bo_ggtt_addr()
    461  return PAGE_ALIGN(xe_bo_size(bo));  in xe_bo_ccs_pages_start()

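The xe_bo.h hit at line 389 is the definition site: xe_bo_size() is the accessor every other file in this listing goes through rather than reaching into the BO's internals. A minimal sketch of how such an accessor is typically plumbed; the stand-in struct layout below is an assumption for illustration, only the xe_bo_size() signature shown above is confirmed by the listing:

    #include <stddef.h>

    /* Stand-in types: an xe_bo is assumed to embed a TTM buffer object,
     * whose base GEM object carries the allocated size. */
    struct drm_gem_object { size_t size; };
    struct ttm_buffer_object { struct drm_gem_object base; };
    struct xe_bo { struct ttm_buffer_object ttm; };

    /* Sketch of the accessor defined at xe_bo.h:389: return the size of
     * the underlying GEM object instead of letting callers open-code
     * bo->ttm.base.size at every use site. */
    static inline size_t xe_bo_size(struct xe_bo *bo)
    {
            return bo->ttm.base.size;
    }

Funneling every size query through one inline keeps the call sites in this listing uniform and gives a single place to hang assertions or instrumentation later.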
xe_mem_pool.c
    89   xe_bo_size(pool->bo),  in pool_shadow_init()
    198  pool->cpu_addr, xe_bo_size(pool->bo));  in xe_mem_pool_sync()

xe_trace_bo.h
    36   __entry->size = xe_bo_size(bo);
    76   __entry->size = xe_bo_size(bo);

xe_guc_ct.c
    625   xe_bo_size(ct->ctbs.h2g.bo));  in __xe_guc_ct_start()
    627   xe_bo_size(ct->ctbs.g2h.bo));  in __xe_guc_ct_start()
    1984  snapshot->ctb_size = xe_bo_size(ct->ctbs.h2g.bo) +  in guc_ct_snapshot_alloc()
    1985  xe_bo_size(ct->ctbs.g2h.bo);  in guc_ct_snapshot_alloc()
    2035  xe_bo_size(ct->ctbs.h2g.bo));  in guc_ct_snapshot_capture()
    2036  xe_map_memcpy_from(xe, snapshot->ctb + xe_bo_size(ct->ctbs.h2g.bo),  in guc_ct_snapshot_capture()
    2038  xe_bo_size(ct->ctbs.g2h.bo));  in guc_ct_snapshot_capture()

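The guc_ct_snapshot_alloc()/guc_ct_snapshot_capture() hits show a common capture pattern: one snapshot buffer sized as the sum of the H2G and G2H CTB BOs, with the G2H contents copied at offset xe_bo_size(h2g.bo). A userspace sketch of that concatenation, using plain memcpy() in place of the driver's xe_map_memcpy_from(); struct ctb and snapshot_ctbs() are hypothetical stand-ins, not driver API:

    #include <stdlib.h>
    #include <string.h>

    /* Hypothetical stand-in for a CT buffer; in the driver each one lives
     * in a BO and is read through an io/sysmem-aware copy helper. */
    struct ctb { const void *data; size_t size; };

    /* Mirrors the sizing at xe_guc_ct.c:1984-1985 and the capture at
     * 2035-2038: allocate h2g + g2h bytes, copy h2g at offset 0 and g2h
     * immediately after it. */
    static void *snapshot_ctbs(const struct ctb *h2g, const struct ctb *g2h,
                               size_t *total)
    {
            char *buf;

            *total = h2g->size + g2h->size;
            buf = malloc(*total);
            if (!buf)
                    return NULL;

            memcpy(buf, h2g->data, h2g->size);
            memcpy(buf + h2g->size, g2h->data, g2h->size);
            return buf;
    }

Keeping both rings in one contiguous snapshot means the later dump path only has to remember a single base pointer and the H2G size as the split point.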
xe_gsc.c
    63   xe_bo_size(gsc->private) - fw_size);  in memcpy_fw()
    86   bb->cs[bb->len++] = (xe_bo_size(gsc->private) / SZ_4K) |  in emit_gsc_upload()

xe_guc_log.c
    128  snapshot->size = xe_bo_size(log->bo);  in xe_guc_log_snapshot_alloc()
    638  xe_map_memset(xe, &bo->vmap, 0, 0, xe_bo_size(bo));  in xe_guc_log_init()

xe_bo.c
    491   extra_pages = DIV_ROUND_UP(xe_device_ccs_bytes(xe, xe_bo_size(bo)),  in xe_ttm_tt_create()
    1356  backup = xe_bo_init_locked(xe, NULL, NULL, bo->ttm.base.resv, NULL, xe_bo_size(bo),  in xe_bo_notifier_prepare_pinned()
    1440  xe_bo_size(bo));  in xe_bo_evict_pinned_copy()
    1497  xe_bo_size(bo),  in xe_bo_evict_pinned()
    1590  xe_bo_size(bo));  in xe_bo_restore_pinned()
    1787  xe_bo_size(bo) - (offset & PAGE_MASK), &cursor);  in xe_ttm_access_memory()
    2544  start + xe_bo_size(bo), U64_MAX,  in __xe_bo_create_locked()
    2905  xe_bo_size(*src), dst_flags);  in xe_managed_bo_reinit_in_vram()
    3214  ret = ttm_bo_kmap(&bo->ttm, 0, xe_bo_size(bo) >> PAGE_SHIFT, &bo->kmap);  in xe_bo_vmap()

xe_drm_client.c
    170  u64 sz = xe_bo_size(bo);  in bo_meminfo()

xe_gt_sriov_pf_migration.c
    613  vram_size = xe_bo_size(vram);  in pf_save_vf_vram_mig_data()
    651  size = xe_bo_size(vram);  in pf_restore_vf_vram_mig_data()

xe_guc_ads.c
    884  xe_map_memset(ads_to_xe(ads), ads_to_map(ads), 0, 0, xe_bo_size(ads->bo));  in xe_guc_ads_populate_minimal()
    908  xe_map_memset(ads_to_xe(ads), ads_to_map(ads), 0, 0, xe_bo_size(ads->bo));  in xe_guc_ads_populate()

xe_oa.c
    407   int size_exponent = __ffs(xe_bo_size(stream->oa_buffer.bo));  in xe_oa_init_oa_buffer()
    439   memset(stream->oa_buffer.vaddr, 0, xe_bo_size(stream->oa_buffer.bo));  in xe_oa_init_oa_buffer()
    1067  if (xe_bo_size(stream->oa_buffer.bo) > SZ_16M)  in oag_buf_size_select()
    1598  struct drm_xe_oa_stream_info info = { .oa_buf_size = xe_bo_size(stream->oa_buffer.bo), };  in xe_oa_info_locked()
    1685  if (vma->vm_end - vma->vm_start != xe_bo_size(stream->oa_buffer.bo)) {  in xe_oa_mmap()

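The xe_oa.c hit at line 407 derives a size exponent with __ffs(xe_bo_size(...)). That only works because the OA buffer size is a power of two: the lowest set bit is then the only set bit, so its index equals log2(size). A userspace model of that step, assuming __builtin_ctzl() as the equivalent of the kernel's 0-based __ffs():

    #include <assert.h>
    #include <stddef.h>

    /* Model of the xe_oa.c:407 pattern: for a power-of-two size the index
     * of the lowest set bit is exactly log2(size). */
    static int oa_buf_size_exponent(size_t size)
    {
            assert(size && (size & (size - 1)) == 0);  /* power of two only */
            return __builtin_ctzl(size);
    }

For a 16 MiB buffer, the SZ_16M cap checked in oag_buf_size_select(), this yields 24; for a non-power-of-two size the trick silently returns the wrong exponent, which is why the buffer size must be validated first.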
xe_lrc.c
    800  u32 offset = xe_bo_size(lrc->bo) - LRC_WA_BB_SIZE -  in __xe_lrc_indirect_ring_offset()
    811  return xe_bo_size(lrc->bo) - LRC_WA_BB_SIZE - LRC_INDIRECT_CTX_BO_SIZE;  in __xe_lrc_indirect_ctx_offset()
    816  return xe_bo_size(lrc->bo) - LRC_WA_BB_SIZE;  in __xe_lrc_wa_bb_offset()

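The xe_lrc.c hits all compute offsets backwards from xe_bo_size(lrc->bo): the workaround batch buffer occupies the last LRC_WA_BB_SIZE bytes of the LRC BO and the indirect context area sits immediately before it. A sketch of that end-anchored layout; the constant values below are placeholders, not the driver's real sizes:

    #include <stddef.h>

    /* Placeholder values; the real LRC_WA_BB_SIZE and
     * LRC_INDIRECT_CTX_BO_SIZE are defined by the driver. */
    #define LRC_WA_BB_SIZE           0x1000
    #define LRC_INDIRECT_CTX_BO_SIZE 0x1000

    /* Mirrors xe_lrc.c:816: the WA BB is the last region in the BO. */
    static size_t lrc_wa_bb_offset(size_t lrc_bo_size)
    {
            return lrc_bo_size - LRC_WA_BB_SIZE;
    }

    /* Mirrors xe_lrc.c:811: the indirect context area is carved out just
     * below the WA BB. */
    static size_t lrc_indirect_ctx_offset(size_t lrc_bo_size)
    {
            return lrc_bo_size - LRC_WA_BB_SIZE - LRC_INDIRECT_CTX_BO_SIZE;
    }

Anchoring fixed-size regions at the end of the BO lets the variable-size part at offset 0 grow without disturbing these offsets: only the total BO size changes.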
xe_guc.c
    69   xe_assert(xe, xe_bo_size(bo) <= GUC_GGTT_TOP - addr);  in guc_bo_ggtt_addr()
    432  xe_assert(xe, (buf - base + G2G_BUFFER_SIZE) <= xe_bo_size(g2g_bo));  in guc_g2g_register()

xe_vm.c
    3793  if (XE_IOCTL_DBG(xe, range > xe_bo_size(bo)) ||  in xe_vm_bind_ioctl_validate_bo()
    3795  xe_bo_size(bo) - range)) {  in xe_vm_bind_ioctl_validate_bo()
    4240  ops = vm_bind_ioctl_ops_create(vm, &vops, bo, 0, addr, xe_bo_size(bo),  in xe_vm_bind_kernel_bo()

/linux/drivers/gpu/drm/xe/display/
xe_stolen.c
    81  return xe_bo_size(node->bo);  in xe_stolen_node_size()

xe_hdcp_gsc.c
    89  xe_map_memset(xe, &bo->vmap, 0, 0, xe_bo_size(bo));  in intel_hdcp_gsc_initialize_message()

/linux/drivers/gpu/drm/xe/tests/
xe_bo.c
    110  offset = xe_device_ccs_bytes(tile_to_xe(tile), xe_bo_size(bo));  in ccs_test_migrate()