xref: /linux/drivers/gpu/drm/xe/display/xe_dsb_buffer.c (revision 40286d6379aacfcc053253ef78dc78b09addffda)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright 2023, Intel Corporation.
4  */
5 
6 #include <drm/intel/display_parent_interface.h>
7 
8 #include "xe_bo.h"
9 #include "xe_device.h"
10 #include "xe_device_types.h"
11 #include "xe_dsb_buffer.h"
12 
/*
 * Backing storage for a display DSB (Display State Buffer) command buffer,
 * implemented on top of a pinned, GGTT-mapped xe_bo.
 */
struct intel_dsb_buffer {
	/*
	 * Never assigned in this file; only referenced via
	 * sizeof(*cmd_buf) to get the command-word size (u32).
	 */
	u32 *cmd_buf;
	/* Pinned backing object; CPU access goes through bo->vmap. */
	struct xe_bo *bo;
	/* Size in bytes as requested by the creator (pre PAGE_ALIGN). */
	size_t buf_size;
};
18 
19 static u32 xe_dsb_buffer_ggtt_offset(struct intel_dsb_buffer *dsb_buf)
20 {
21 	return xe_bo_ggtt_addr(dsb_buf->bo);
22 }
23 
24 static void xe_dsb_buffer_write(struct intel_dsb_buffer *dsb_buf, u32 idx, u32 val)
25 {
26 	iosys_map_wr(&dsb_buf->bo->vmap, idx * 4, u32, val);
27 }
28 
29 static u32 xe_dsb_buffer_read(struct intel_dsb_buffer *dsb_buf, u32 idx)
30 {
31 	return iosys_map_rd(&dsb_buf->bo->vmap, idx * 4, u32);
32 }
33 
34 static void xe_dsb_buffer_fill(struct intel_dsb_buffer *dsb_buf, u32 idx, u32 val, size_t size)
35 {
36 	WARN_ON(idx > (dsb_buf->buf_size - size) / sizeof(*dsb_buf->cmd_buf));
37 
38 	iosys_map_memset(&dsb_buf->bo->vmap, idx * 4, val, size);
39 }
40 
41 static struct intel_dsb_buffer *xe_dsb_buffer_create(struct drm_device *drm, size_t size)
42 {
43 	struct xe_device *xe = to_xe_device(drm);
44 	struct intel_dsb_buffer *dsb_buf;
45 	struct xe_bo *obj;
46 	int ret;
47 
48 	dsb_buf = kzalloc_obj(*dsb_buf);
49 	if (!dsb_buf)
50 		return ERR_PTR(-ENOMEM);
51 
52 	/* Set scanout flag for WC mapping */
53 	obj = xe_bo_create_pin_map_novm(xe, xe_device_get_root_tile(xe),
54 					PAGE_ALIGN(size),
55 					ttm_bo_type_kernel,
56 					XE_BO_FLAG_VRAM_IF_DGFX(xe_device_get_root_tile(xe)) |
57 					XE_BO_FLAG_FORCE_WC |
58 					XE_BO_FLAG_GGTT,
59 					false);
60 	if (IS_ERR(obj)) {
61 		ret = PTR_ERR(obj);
62 		goto err_pin_map;
63 	}
64 
65 	dsb_buf->bo = obj;
66 	dsb_buf->buf_size = size;
67 
68 	return dsb_buf;
69 
70 err_pin_map:
71 	kfree(dsb_buf);
72 
73 	return ERR_PTR(ret);
74 }
75 
/*
 * Release a buffer obtained from xe_dsb_buffer_create(): unpin/unmap
 * the backing BO, then free the wrapper. @dsb_buf must not be used
 * afterwards.
 */
static void xe_dsb_buffer_cleanup(struct intel_dsb_buffer *dsb_buf)
{
	xe_bo_unpin_map_no_vm(dsb_buf->bo);
	kfree(dsb_buf);
}
81 
82 static void xe_dsb_buffer_flush_map(struct intel_dsb_buffer *dsb_buf)
83 {
84 	struct xe_device *xe = dsb_buf->bo->tile->xe;
85 
86 	/*
87 	 * The memory barrier here is to ensure coherency of DSB vs MMIO,
88 	 * both for weak ordering archs and discrete cards.
89 	 */
90 	xe_device_wmb(xe);
91 	xe_device_l2_flush(xe);
92 }
93 
/*
 * DSB buffer vfuncs exported to the shared display core; xe backs the
 * DSB with a pinned, GGTT-mapped BO accessed through iosys_map.
 */
const struct intel_display_dsb_interface xe_display_dsb_interface = {
	.ggtt_offset = xe_dsb_buffer_ggtt_offset,
	.write = xe_dsb_buffer_write,
	.read = xe_dsb_buffer_read,
	.fill = xe_dsb_buffer_fill,
	.create = xe_dsb_buffer_create,
	.cleanup = xe_dsb_buffer_cleanup,
	.flush_map = xe_dsb_buffer_flush_map,
};
103