// SPDX-License-Identifier: MIT
/*
 * Copyright 2023, Intel Corporation.
 */

#include "i915_vma.h"
#include "intel_display_types.h"
#include "intel_dsb_buffer.h"
#include "xe_bo.h"
#include "xe_device.h"
#include "xe_device_types.h"

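/* Return the GGTT address of the BO backing the DSB buffer. */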
u32 intel_dsb_buffer_ggtt_offset(struct intel_dsb_buffer *dsb_buf)
{
	return xe_bo_ggtt_addr(dsb_buf->vma->bo);
}

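/*
 * Write one dword at dword index @idx through the BO's kernel mapping,
 * then flush the device L2 cache so the update is visible to the hardware.
 */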
void intel_dsb_buffer_write(struct intel_dsb_buffer *dsb_buf, u32 idx, u32 val)
{
	struct xe_device *xe = dsb_buf->vma->bo->tile->xe;

	iosys_map_wr(&dsb_buf->vma->bo->vmap, idx * 4, u32, val);
	xe_device_l2_flush(xe);
}

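/* Read back the dword at dword index @idx from the BO's kernel mapping. */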
u32 intel_dsb_buffer_read(struct intel_dsb_buffer *dsb_buf, u32 idx)
{
	return iosys_map_rd(&dsb_buf->vma->bo->vmap, idx * 4, u32);
}

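/*
 * Fill @size bytes starting at dword index @idx with @val and flush the
 * device L2 cache; warn if the range would run past the end of the buffer.
 */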
void intel_dsb_buffer_memset(struct intel_dsb_buffer *dsb_buf, u32 idx, u32 val, size_t size)
{
	struct xe_device *xe = dsb_buf->vma->bo->tile->xe;

	WARN_ON(idx > (dsb_buf->buf_size - size) / sizeof(*dsb_buf->cmd_buf));

	iosys_map_memset(&dsb_buf->vma->bo->vmap, idx * 4, val, size);
	xe_device_l2_flush(xe);
}

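/*
 * Allocate a pinned, kernel-mapped BO for the DSB command buffer on the
 * root tile (placed in VRAM on discrete GPUs) and map it into the GGTT
 * so intel_dsb_buffer_ggtt_offset() can hand its address to the hardware.
 */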
bool intel_dsb_buffer_create(struct intel_crtc *crtc, struct intel_dsb_buffer *dsb_buf, size_t size)
{
	struct xe_device *xe = to_xe_device(crtc->base.dev);
	struct xe_bo *obj;
	struct i915_vma *vma;

	vma = kzalloc(sizeof(*vma), GFP_KERNEL);
	if (!vma)
		return false;

	obj = xe_bo_create_pin_map(xe, xe_device_get_root_tile(xe),
				   NULL, PAGE_ALIGN(size),
				   ttm_bo_type_kernel,
				   XE_BO_FLAG_VRAM_IF_DGFX(xe_device_get_root_tile(xe)) |
				   XE_BO_FLAG_GGTT);
	if (IS_ERR(obj)) {
		kfree(vma);
		return false;
	}

	vma->bo = obj;
	dsb_buf->vma = vma;
	dsb_buf->buf_size = size;

	return true;
}

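/* Unpin and unmap the BO, then free the vma wrapper allocated in intel_dsb_buffer_create(). */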
void intel_dsb_buffer_cleanup(struct intel_dsb_buffer *dsb_buf)
{
	xe_bo_unpin_map_no_vm(dsb_buf->vma->bo);
	kfree(dsb_buf->vma);
}

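/*
 * Currently a no-op on xe: the accessors above already flush the device
 * L2 cache after each write (see the TODO below).
 */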
void intel_dsb_buffer_flush_map(struct intel_dsb_buffer *dsb_buf)
{
	/* TODO: add xe specific flush_map() for dsb buffer object. */
}