xref: /linux/drivers/gpu/drm/sprd/sprd_dpu.c (revision b07bcf34b6c98a7dd540c94e804c39a4273da37c)
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2020 Unisoc Inc.
 */

#include <linux/component.h>
#include <linux/delay.h>
#include <linux/dma-buf.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/of_device.h>
#include <linux/of_graph.h>
#include <linux/of_irq.h>
#include <linux/wait.h>
#include <linux/workqueue.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_plane_helper.h>

#include "sprd_drm.h"
#include "sprd_dpu.h"

/* Global control registers */
#define REG_DPU_CTRL	0x04
#define REG_DPU_CFG0	0x08
#define REG_PANEL_SIZE	0x20
#define REG_BLEND_SIZE	0x24
#define REG_BG_COLOR	0x2C

/* Layer0 control registers */
#define REG_LAY_BASE_ADDR0	0x30
#define REG_LAY_BASE_ADDR1	0x34
#define REG_LAY_BASE_ADDR2	0x38
#define REG_LAY_CTRL		0x40
#define REG_LAY_SIZE		0x44
#define REG_LAY_PITCH		0x48
#define REG_LAY_POS		0x4C
#define REG_LAY_ALPHA		0x50
#define REG_LAY_CROP_START	0x5C

/* Interrupt control registers */
#define REG_DPU_INT_EN		0x1E0
#define REG_DPU_INT_CLR		0x1E4
#define REG_DPU_INT_STS		0x1E8

/* DPI control registers */
#define REG_DPI_CTRL		0x1F0
#define REG_DPI_H_TIMING	0x1F4
#define REG_DPI_V_TIMING	0x1F8

/* MMU control registers */
#define REG_MMU_EN			0x800
#define REG_MMU_VPN_RANGE		0x80C
#define REG_MMU_PPN1			0x83C
#define REG_MMU_RANGE1			0x840
#define REG_MMU_PPN2			0x844
#define REG_MMU_RANGE2			0x848

/* Global control bits */
#define BIT_DPU_RUN			BIT(0)
#define BIT_DPU_STOP			BIT(1)
#define BIT_DPU_REG_UPDATE		BIT(2)
#define BIT_DPU_IF_EDPI			BIT(0)

/* Layer control bits */
#define BIT_DPU_LAY_EN				BIT(0)
#define BIT_DPU_LAY_LAYER_ALPHA			(0x01 << 2)
#define BIT_DPU_LAY_COMBO_ALPHA			(0x02 << 2)
#define BIT_DPU_LAY_FORMAT_YUV422_2PLANE		(0x00 << 4)
#define BIT_DPU_LAY_FORMAT_YUV420_2PLANE		(0x01 << 4)
#define BIT_DPU_LAY_FORMAT_YUV420_3PLANE		(0x02 << 4)
#define BIT_DPU_LAY_FORMAT_ARGB8888			(0x03 << 4)
#define BIT_DPU_LAY_FORMAT_RGB565			(0x04 << 4)
#define BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3		(0x00 << 8)
#define BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0		(0x01 << 8)
#define BIT_DPU_LAY_NO_SWITCH			(0x00 << 10)
#define BIT_DPU_LAY_RB_OR_UV_SWITCH		(0x01 << 10)
#define BIT_DPU_LAY_MODE_BLEND_NORMAL		(0x00 << 16)
#define BIT_DPU_LAY_MODE_BLEND_PREMULT		(0x01 << 16)
#define BIT_DPU_LAY_ROTATION_0		(0x00 << 20)
#define BIT_DPU_LAY_ROTATION_90		(0x01 << 20)
#define BIT_DPU_LAY_ROTATION_180	(0x02 << 20)
#define BIT_DPU_LAY_ROTATION_270	(0x03 << 20)
#define BIT_DPU_LAY_ROTATION_0_M	(0x04 << 20)
#define BIT_DPU_LAY_ROTATION_90_M	(0x05 << 20)
#define BIT_DPU_LAY_ROTATION_180_M	(0x06 << 20)
#define BIT_DPU_LAY_ROTATION_270_M	(0x07 << 20)

/* Interrupt control & status bits */
#define BIT_DPU_INT_DONE		BIT(0)
#define BIT_DPU_INT_TE			BIT(1)
#define BIT_DPU_INT_ERR			BIT(2)
#define BIT_DPU_INT_UPDATE_DONE		BIT(4)
#define BIT_DPU_INT_VSYNC		BIT(5)

/* DPI control bits */
#define BIT_DPU_EDPI_TE_EN		BIT(8)
#define BIT_DPU_EDPI_FROM_EXTERNAL_PAD	BIT(10)
#define BIT_DPU_DPI_HALT_EN		BIT(16)

static const u32 layer_fmts[] = {
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_ABGR8888,
	DRM_FORMAT_RGBA8888,
	DRM_FORMAT_BGRA8888,
	DRM_FORMAT_RGBX8888,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_BGR565,
	DRM_FORMAT_NV12,
	DRM_FORMAT_NV21,
	DRM_FORMAT_NV16,
	DRM_FORMAT_NV61,
	DRM_FORMAT_YUV420,
	DRM_FORMAT_YVU420,
};

struct sprd_plane {
	struct drm_plane base;
};

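/*
 * Block until the ISR reports the stop-done event, or return right away
 * if the DPU is already stopped. Times out after 500ms with -ETIMEDOUT.
 */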
static int dpu_wait_stop_done(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;
	int rc;

	if (ctx->stopped)
		return 0;

	rc = wait_event_interruptible_timeout(ctx->wait_queue, ctx->evt_stop,
					      msecs_to_jiffies(500));
	ctx->evt_stop = false;

	ctx->stopped = true;

	if (!rc) {
		drm_err(dpu->drm, "dpu wait for stop done time out!\n");
		return -ETIMEDOUT;
	}

	return 0;
}

static int dpu_wait_update_done(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;
	int rc;

	ctx->evt_update = false;

	rc = wait_event_interruptible_timeout(ctx->wait_queue, ctx->evt_update,
					      msecs_to_jiffies(500));

	if (!rc) {
		drm_err(dpu->drm, "dpu wait for reg update done time out!\n");
		return -ETIMEDOUT;
	}

	return 0;
}

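/*
 * Translate a DRM fourcc into the DPU layer control bits: pixel format,
 * byte endianness and, where needed, an R/B or U/V swap. Unsupported
 * formats return 0, which atomic_check treats as an error.
 */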
static u32 drm_format_to_dpu(struct drm_framebuffer *fb)
{
	u32 format = 0;

	switch (fb->format->format) {
	case DRM_FORMAT_BGRA8888:
		/* BGRA8888 -> ARGB8888 */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
		format |= BIT_DPU_LAY_FORMAT_ARGB8888;
		break;
	case DRM_FORMAT_RGBX8888:
	case DRM_FORMAT_RGBA8888:
		/* RGBA8888 -> ABGR8888 */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
		fallthrough;
	case DRM_FORMAT_ABGR8888:
		/* RB switch */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		fallthrough;
	case DRM_FORMAT_ARGB8888:
		format |= BIT_DPU_LAY_FORMAT_ARGB8888;
		break;
	case DRM_FORMAT_XBGR8888:
		/* RB switch */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		fallthrough;
	case DRM_FORMAT_XRGB8888:
		format |= BIT_DPU_LAY_FORMAT_ARGB8888;
		break;
	case DRM_FORMAT_BGR565:
		/* RB switch */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		fallthrough;
	case DRM_FORMAT_RGB565:
		format |= BIT_DPU_LAY_FORMAT_RGB565;
		break;
	case DRM_FORMAT_NV12:
		/* 2-plane YUV420 */
		format |= BIT_DPU_LAY_FORMAT_YUV420_2PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_NO_SWITCH;
		break;
	case DRM_FORMAT_NV21:
		/* 2-plane YUV420 */
		format |= BIT_DPU_LAY_FORMAT_YUV420_2PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		break;
	case DRM_FORMAT_NV16:
		/* 2-plane YUV422 */
		format |= BIT_DPU_LAY_FORMAT_YUV422_2PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
		/* UV endian */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		break;
	case DRM_FORMAT_NV61:
		/* 2-plane YUV422 */
		format |= BIT_DPU_LAY_FORMAT_YUV422_2PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_NO_SWITCH;
		break;
	case DRM_FORMAT_YUV420:
		format |= BIT_DPU_LAY_FORMAT_YUV420_3PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_NO_SWITCH;
		break;
	case DRM_FORMAT_YVU420:
		format |= BIT_DPU_LAY_FORMAT_YUV420_3PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		break;
	default:
		break;
	}

	return format;
}

static u32 drm_rotation_to_dpu(struct drm_plane_state *state)
{
	u32 rotation = 0;

	switch (state->rotation) {
	default:
	case DRM_MODE_ROTATE_0:
		rotation = BIT_DPU_LAY_ROTATION_0;
		break;
	case DRM_MODE_ROTATE_90:
		rotation = BIT_DPU_LAY_ROTATION_90;
		break;
	case DRM_MODE_ROTATE_180:
		rotation = BIT_DPU_LAY_ROTATION_180;
		break;
	case DRM_MODE_ROTATE_270:
		rotation = BIT_DPU_LAY_ROTATION_270;
		break;
	case DRM_MODE_REFLECT_Y:
		rotation = BIT_DPU_LAY_ROTATION_180_M;
		break;
	case (DRM_MODE_REFLECT_Y | DRM_MODE_ROTATE_90):
		rotation = BIT_DPU_LAY_ROTATION_90_M;
		break;
	case DRM_MODE_REFLECT_X:
		rotation = BIT_DPU_LAY_ROTATION_0_M;
		break;
	case (DRM_MODE_REFLECT_X | DRM_MODE_ROTATE_90):
		rotation = BIT_DPU_LAY_ROTATION_270_M;
		break;
	}

	return rotation;
}

static u32 drm_blend_to_dpu(struct drm_plane_state *state)
{
	u32 blend = 0;

	switch (state->pixel_blend_mode) {
	case DRM_MODE_BLEND_COVERAGE:
		/* alpha mode select - combo alpha */
		blend |= BIT_DPU_LAY_COMBO_ALPHA;
		/* Normal mode */
		blend |= BIT_DPU_LAY_MODE_BLEND_NORMAL;
		break;
	case DRM_MODE_BLEND_PREMULTI:
		/* alpha mode select - combo alpha */
		blend |= BIT_DPU_LAY_COMBO_ALPHA;
		/* Pre-mult mode */
		blend |= BIT_DPU_LAY_MODE_BLEND_PREMULT;
		break;
	case DRM_MODE_BLEND_PIXEL_NONE:
	default:
		/* don't do blending, maybe RGBX */
		/* alpha mode select - layer alpha */
		blend |= BIT_DPU_LAY_LAYER_ALPHA;
		break;
	}

	return blend;
}

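/*
 * Program one hardware overlay layer from a plane state: per-plane scanout
 * addresses, position, size, crop offset, alpha, pitch and the combined
 * format/blend/rotation control word.
 */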
static void sprd_dpu_layer(struct sprd_dpu *dpu, struct drm_plane_state *state)
{
	struct dpu_context *ctx = &dpu->ctx;
	struct drm_gem_cma_object *cma_obj;
	struct drm_framebuffer *fb = state->fb;
	u32 addr, size, offset, pitch, blend, format, rotation;
	u32 src_x = state->src_x >> 16;
	u32 src_y = state->src_y >> 16;
	u32 src_w = state->src_w >> 16;
	u32 src_h = state->src_h >> 16;
	u32 dst_x = state->crtc_x;
	u32 dst_y = state->crtc_y;
	u32 alpha = state->alpha;
	u32 index = state->zpos;
	int i;

	offset = (dst_x & 0xffff) | (dst_y << 16);
	size = (src_w & 0xffff) | (src_h << 16);

	for (i = 0; i < fb->format->num_planes; i++) {
		cma_obj = drm_fb_cma_get_gem_obj(fb, i);
		addr = cma_obj->paddr + fb->offsets[i];

		if (i == 0)
			layer_reg_wr(ctx, REG_LAY_BASE_ADDR0, addr, index);
		else if (i == 1)
			layer_reg_wr(ctx, REG_LAY_BASE_ADDR1, addr, index);
		else
			layer_reg_wr(ctx, REG_LAY_BASE_ADDR2, addr, index);
	}

	if (fb->format->num_planes == 3) {
		/* UV pitch is 1/2 of Y pitch */
		pitch = (fb->pitches[0] / fb->format->cpp[0]) |
				(fb->pitches[0] / fb->format->cpp[0] << 15);
	} else {
		pitch = fb->pitches[0] / fb->format->cpp[0];
	}

	layer_reg_wr(ctx, REG_LAY_POS, offset, index);
	layer_reg_wr(ctx, REG_LAY_SIZE, size, index);
	layer_reg_wr(ctx, REG_LAY_CROP_START,
		     src_y << 16 | src_x, index);
	layer_reg_wr(ctx, REG_LAY_ALPHA, alpha, index);
	layer_reg_wr(ctx, REG_LAY_PITCH, pitch, index);

	format = drm_format_to_dpu(fb);
	blend = drm_blend_to_dpu(state);
	rotation = drm_rotation_to_dpu(state);

	layer_reg_wr(ctx, REG_LAY_CTRL, BIT_DPU_LAY_EN |
				format |
				blend |
				rotation,
				index);
}

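/*
 * Apply the newly written layer configuration: in DPI mode trigger a shadow
 * register update and wait for it to complete; in EDPI mode set DPU_RUN,
 * since the configuration is only latched on the rising edge of that bit.
 */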
static void sprd_dpu_flip(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;

	/*
	 * Make sure the dpu is in stop status. DPU has no shadow
	 * registers in EDPI mode. So the config registers can only be
	 * updated in the rising edge of DPU_RUN bit.
	 */
	if (ctx->if_type == SPRD_DPU_IF_EDPI)
		dpu_wait_stop_done(dpu);

	/* update trigger and wait */
	if (ctx->if_type == SPRD_DPU_IF_DPI) {
		if (!ctx->stopped) {
			dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_REG_UPDATE);
			dpu_wait_update_done(dpu);
		}

		dpu_reg_set(ctx, REG_DPU_INT_EN, BIT_DPU_INT_ERR);
	} else if (ctx->if_type == SPRD_DPU_IF_EDPI) {
		dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_RUN);

		ctx->stopped = false;
	}
}

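/*
 * Controller setup done at CRTC enable time: background color, MMU defaults
 * (translation disabled), DPI vs EDPI interface selection and the set of
 * interrupt sources to enable.
 */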
static void sprd_dpu_init(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;
	u32 int_mask = 0;

	writel(0x00, ctx->base + REG_BG_COLOR);
	writel(0x00, ctx->base + REG_MMU_EN);
	writel(0x00, ctx->base + REG_MMU_PPN1);
	writel(0xffff, ctx->base + REG_MMU_RANGE1);
	writel(0x00, ctx->base + REG_MMU_PPN2);
	writel(0xffff, ctx->base + REG_MMU_RANGE2);
	writel(0x1ffff, ctx->base + REG_MMU_VPN_RANGE);

	if (ctx->if_type == SPRD_DPU_IF_DPI) {
		/* use dpi as interface */
		dpu_reg_clr(ctx, REG_DPU_CFG0, BIT_DPU_IF_EDPI);
		/* disable Halt function for SPRD DSI */
		dpu_reg_clr(ctx, REG_DPI_CTRL, BIT_DPU_DPI_HALT_EN);
		/* select te from external pad */
		dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_FROM_EXTERNAL_PAD);

		/* enable dpu update done INT */
		int_mask |= BIT_DPU_INT_UPDATE_DONE;
		/* enable dpu done INT */
		int_mask |= BIT_DPU_INT_DONE;
		/* enable dpu dpi vsync */
		int_mask |= BIT_DPU_INT_VSYNC;
		/* enable dpu TE INT */
		int_mask |= BIT_DPU_INT_TE;
		/* enable underflow err INT */
		int_mask |= BIT_DPU_INT_ERR;
	} else if (ctx->if_type == SPRD_DPU_IF_EDPI) {
		/* use edpi as interface */
		dpu_reg_set(ctx, REG_DPU_CFG0, BIT_DPU_IF_EDPI);
		/* use external te */
		dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_FROM_EXTERNAL_PAD);
		/* enable te */
		dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_TE_EN);

		/* enable stop done INT */
		int_mask |= BIT_DPU_INT_DONE;
		/* enable TE INT */
		int_mask |= BIT_DPU_INT_TE;
	}

	writel(int_mask, ctx->base + REG_DPU_INT_EN);
}

static void sprd_dpu_fini(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;

	writel(0x00, ctx->base + REG_DPU_INT_EN);
	writel(0xff, ctx->base + REG_DPU_INT_CLR);
}

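/*
 * Program the panel and blend sizes and, when the DPI interface is used,
 * the horizontal/vertical sync and porch timings from the cached videomode.
 */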
static void sprd_dpi_init(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;
	u32 reg_val;
	u32 size;

	size = (ctx->vm.vactive << 16) | ctx->vm.hactive;
	writel(size, ctx->base + REG_PANEL_SIZE);
	writel(size, ctx->base + REG_BLEND_SIZE);

	if (ctx->if_type == SPRD_DPU_IF_DPI) {
		/* set dpi timing */
		reg_val = ctx->vm.hsync_len << 0 |
			  ctx->vm.hback_porch << 8 |
			  ctx->vm.hfront_porch << 20;
		writel(reg_val, ctx->base + REG_DPI_H_TIMING);

		reg_val = ctx->vm.vsync_len << 0 |
			  ctx->vm.vback_porch << 8 |
			  ctx->vm.vfront_porch << 20;
		writel(reg_val, ctx->base + REG_DPI_V_TIMING);
	}
}

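/* Start scanout: set the DPU_RUN bit and mark the controller as running. */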
void sprd_dpu_run(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;

	dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_RUN);

	ctx->stopped = false;
}

void sprd_dpu_stop(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;

	if (ctx->if_type == SPRD_DPU_IF_DPI)
		dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_STOP);

	dpu_wait_stop_done(dpu);
}

static int sprd_plane_atomic_check(struct drm_plane *plane,
				   struct drm_atomic_state *state)
{
	struct drm_plane_state *plane_state = drm_atomic_get_new_plane_state(state,
									     plane);
	struct drm_crtc_state *crtc_state;
	u32 fmt;

	if (!plane_state->fb || !plane_state->crtc)
		return 0;

	fmt = drm_format_to_dpu(plane_state->fb);
	if (!fmt)
		return -EINVAL;

	crtc_state = drm_atomic_get_crtc_state(plane_state->state, plane_state->crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	return drm_atomic_helper_check_plane_state(plane_state, crtc_state,
						  DRM_PLANE_HELPER_NO_SCALING,
						  DRM_PLANE_HELPER_NO_SCALING,
						  true, true);
}

static void sprd_plane_atomic_update(struct drm_plane *drm_plane,
				     struct drm_atomic_state *state)
{
	struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state,
									   drm_plane);
	struct sprd_dpu *dpu = to_sprd_crtc(new_state->crtc);

	/* start configuring the dpu layers */
	sprd_dpu_layer(dpu, new_state);
}

static void sprd_plane_atomic_disable(struct drm_plane *drm_plane,
				      struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state,
									   drm_plane);
	struct sprd_dpu *dpu = to_sprd_crtc(old_state->crtc);

	layer_reg_wr(&dpu->ctx, REG_LAY_CTRL, 0x00, old_state->zpos);
}

static void sprd_plane_create_properties(struct sprd_plane *plane, int index)
{
	unsigned int supported_modes = BIT(DRM_MODE_BLEND_PIXEL_NONE) |
				       BIT(DRM_MODE_BLEND_PREMULTI) |
				       BIT(DRM_MODE_BLEND_COVERAGE);

	/* create rotation property */
	drm_plane_create_rotation_property(&plane->base,
					   DRM_MODE_ROTATE_0,
					   DRM_MODE_ROTATE_MASK |
					   DRM_MODE_REFLECT_MASK);

	/* create alpha property */
	drm_plane_create_alpha_property(&plane->base);

	/* create blend mode property */
	drm_plane_create_blend_mode_property(&plane->base, supported_modes);

	/* create zpos property */
	drm_plane_create_zpos_immutable_property(&plane->base, index);
}

static const struct drm_plane_helper_funcs sprd_plane_helper_funcs = {
	.atomic_check = sprd_plane_atomic_check,
	.atomic_update = sprd_plane_atomic_update,
	.atomic_disable = sprd_plane_atomic_disable,
};

static const struct drm_plane_funcs sprd_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane	= drm_atomic_helper_disable_plane,
	.destroy = drm_plane_cleanup,
	.reset = drm_atomic_helper_plane_reset,
	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
};

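/*
 * Create the six DPU hardware layers as DRM planes (layer 0 becomes the
 * primary plane, the others overlays) and attach the rotation, alpha,
 * blend-mode and fixed zpos properties. Returns the primary plane.
 */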
static struct sprd_plane *sprd_planes_init(struct drm_device *drm)
{
	struct sprd_plane *plane, *primary;
	enum drm_plane_type plane_type;
	int i;

	for (i = 0; i < 6; i++) {
		plane_type = (i == 0) ? DRM_PLANE_TYPE_PRIMARY :
					DRM_PLANE_TYPE_OVERLAY;

		plane = drmm_universal_plane_alloc(drm, struct sprd_plane, base,
						   1, &sprd_plane_funcs,
						   layer_fmts, ARRAY_SIZE(layer_fmts),
						   NULL, plane_type, NULL);
		if (IS_ERR(plane)) {
			drm_err(drm, "failed to init drm plane: %d\n", i);
			return plane;
		}

		drm_plane_helper_add(&plane->base, &sprd_plane_helper_funcs);

		sprd_plane_create_properties(plane, i);

		if (i == 0)
			primary = plane;
	}

	return primary;
}

static void sprd_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
	struct drm_display_mode *mode = &crtc->state->adjusted_mode;

	drm_display_mode_to_videomode(mode, &dpu->ctx.vm);

	sprd_dpi_init(dpu);
}

static void sprd_crtc_atomic_enable(struct drm_crtc *crtc,
				    struct drm_atomic_state *state)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);

	sprd_dpu_init(dpu);

	drm_crtc_vblank_on(&dpu->base);
}

static void sprd_crtc_atomic_disable(struct drm_crtc *crtc,
				     struct drm_atomic_state *state)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
	struct drm_device *drm = dpu->base.dev;

	drm_crtc_vblank_off(&dpu->base);

	sprd_dpu_fini(dpu);

	spin_lock_irq(&drm->event_lock);
	if (crtc->state->event) {
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
	}
	spin_unlock_irq(&drm->event_lock);
}

static void sprd_crtc_atomic_flush(struct drm_crtc *crtc,
				   struct drm_atomic_state *state)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
	struct drm_device *drm = dpu->base.dev;

	sprd_dpu_flip(dpu);

	spin_lock_irq(&drm->event_lock);
	if (crtc->state->event) {
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
	}
	spin_unlock_irq(&drm->event_lock);
}

static int sprd_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);

	dpu_reg_set(&dpu->ctx, REG_DPU_INT_EN, BIT_DPU_INT_VSYNC);

	return 0;
}

static void sprd_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);

	dpu_reg_clr(&dpu->ctx, REG_DPU_INT_EN, BIT_DPU_INT_VSYNC);
}

static const struct drm_crtc_helper_funcs sprd_crtc_helper_funcs = {
	.mode_set_nofb	= sprd_crtc_mode_set_nofb,
	.atomic_flush	= sprd_crtc_atomic_flush,
	.atomic_enable	= sprd_crtc_atomic_enable,
	.atomic_disable	= sprd_crtc_atomic_disable,
};

static const struct drm_crtc_funcs sprd_crtc_funcs = {
	.destroy	= drm_crtc_cleanup,
	.set_config	= drm_atomic_helper_set_config,
	.page_flip	= drm_atomic_helper_page_flip,
	.reset		= drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state	= drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_crtc_destroy_state,
	.enable_vblank	= sprd_crtc_enable_vblank,
	.disable_vblank	= sprd_crtc_disable_vblank,
};

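/*
 * Allocate the CRTC backing this DPU, register the helper funcs and set
 * crtc->port from the device tree so drm_of_find_possible_crtcs() can
 * match encoders against it.
 */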
static struct sprd_dpu *sprd_crtc_init(struct drm_device *drm,
				       struct drm_plane *primary, struct device *dev)
{
	struct device_node *port;
	struct sprd_dpu *dpu;

	dpu = drmm_crtc_alloc_with_planes(drm, struct sprd_dpu, base,
					  primary, NULL,
					&sprd_crtc_funcs, NULL);
	if (IS_ERR(dpu)) {
		drm_err(drm, "failed to init crtc\n");
		return dpu;
	}
	drm_crtc_helper_add(&dpu->base, &sprd_crtc_helper_funcs);

	/*
	 * set crtc port so that drm_of_find_possible_crtcs call works
	 */
	port = of_graph_get_port_by_id(dev->of_node, 0);
	if (!port) {
		drm_err(drm, "failed to find crtc output port for %s\n",
			dev->of_node->full_name);
		return ERR_PTR(-EINVAL);
	}
	dpu->base.port = port;
	of_node_put(port);

	return dpu;
}

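/*
 * DPU interrupt handler: wakes waiters on the update-done and stop-done
 * events, forwards vsync to the DRM vblank machinery and masks the error
 * interrupt once an underflow has been reported.
 */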
static irqreturn_t sprd_dpu_isr(int irq, void *data)
{
	struct sprd_dpu *dpu = data;
	struct dpu_context *ctx = &dpu->ctx;
	u32 reg_val, int_mask = 0;

	reg_val = readl(ctx->base + REG_DPU_INT_STS);

	/* disable err interrupt */
	if (reg_val & BIT_DPU_INT_ERR) {
		int_mask |= BIT_DPU_INT_ERR;
		drm_warn(dpu->drm, "Warning: dpu underflow!\n");
	}

	/* dpu update done isr */
	if (reg_val & BIT_DPU_INT_UPDATE_DONE) {
		ctx->evt_update = true;
		wake_up_interruptible_all(&ctx->wait_queue);
	}

	/* dpu stop done isr */
	if (reg_val & BIT_DPU_INT_DONE) {
		ctx->evt_stop = true;
		wake_up_interruptible_all(&ctx->wait_queue);
	}

	if (reg_val & BIT_DPU_INT_VSYNC)
		drm_crtc_handle_vblank(&dpu->base);

	writel(reg_val, ctx->base + REG_DPU_INT_CLR);
	dpu_reg_clr(ctx, REG_DPU_INT_EN, int_mask);

	return IRQ_HANDLED;
}

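/*
 * Map the DPU register block, quiesce and clear any pending interrupts,
 * install the interrupt handler and initialize the event wait queue.
 */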
static int sprd_dpu_context_init(struct sprd_dpu *dpu,
				 struct device *dev)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct dpu_context *ctx = &dpu->ctx;
	struct resource *res;
	int ret;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	ctx->base = devm_ioremap(dev, res->start, resource_size(res));
	if (!ctx->base) {
		dev_err(dev, "failed to map dpu registers\n");
		return -EFAULT;
	}

	ctx->irq = platform_get_irq(pdev, 0);
	if (ctx->irq < 0) {
		dev_err(dev, "failed to get dpu irq\n");
		return ctx->irq;
	}

	/* disable and clear interrupts before registering the dpu IRQ. */
	writel(0x00, ctx->base + REG_DPU_INT_EN);
	writel(0xff, ctx->base + REG_DPU_INT_CLR);

	ret = devm_request_irq(dev, ctx->irq, sprd_dpu_isr,
			       IRQF_TRIGGER_NONE, "DPU", dpu);
	if (ret) {
		dev_err(dev, "failed to register dpu irq handler\n");
		return ret;
	}

	init_waitqueue_head(&ctx->wait_queue);

	return 0;
}

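/*
 * Component bind callback: create the planes and the CRTC for this DPU
 * instance and map its register and IRQ resources once the master DRM
 * device is available.
 */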
static int sprd_dpu_bind(struct device *dev, struct device *master, void *data)
{
	struct drm_device *drm = data;
	struct sprd_dpu *dpu;
	struct sprd_plane *plane;
	int ret;

	plane = sprd_planes_init(drm);
	if (IS_ERR(plane))
		return PTR_ERR(plane);

	dpu = sprd_crtc_init(drm, &plane->base, dev);
	if (IS_ERR(dpu))
		return PTR_ERR(dpu);

	dpu->drm = drm;
	dev_set_drvdata(dev, dpu);

	ret = sprd_dpu_context_init(dpu, dev);
	if (ret)
		return ret;

	return 0;
}

static const struct component_ops dpu_component_ops = {
	.bind = sprd_dpu_bind,
};

static const struct of_device_id dpu_match_table[] = {
	{ .compatible = "sprd,sharkl3-dpu" },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, dpu_match_table);

static int sprd_dpu_probe(struct platform_device *pdev)
{
	return component_add(&pdev->dev, &dpu_component_ops);
}

static int sprd_dpu_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &dpu_component_ops);

	return 0;
}

struct platform_driver sprd_dpu_driver = {
	.probe = sprd_dpu_probe,
	.remove = sprd_dpu_remove,
	.driver = {
		.name = "sprd-dpu-drv",
		.of_match_table = dpu_match_table,
	},
};

MODULE_AUTHOR("Leon He <leon.he@unisoc.com>");
MODULE_AUTHOR("Kevin Tang <kevin.tang@unisoc.com>");
MODULE_DESCRIPTION("Unisoc Display Controller Driver");
MODULE_LICENSE("GPL v2");