xref: /linux/drivers/gpu/drm/sprd/sprd_dpu.c (revision 4359a011e259a4608afc7fb3635370c9d4ba5943)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2020 Unisoc Inc.
4  */
5 
6 #include <linux/component.h>
7 #include <linux/delay.h>
8 #include <linux/dma-buf.h>
9 #include <linux/io.h>
10 #include <linux/module.h>
11 #include <linux/of.h>
12 #include <linux/of_address.h>
13 #include <linux/of_device.h>
14 #include <linux/of_graph.h>
15 #include <linux/of_irq.h>
16 #include <linux/wait.h>
17 #include <linux/workqueue.h>
18 
19 #include <drm/drm_atomic_helper.h>
20 #include <drm/drm_blend.h>
21 #include <drm/drm_crtc_helper.h>
22 #include <drm/drm_fb_cma_helper.h>
23 #include <drm/drm_framebuffer.h>
24 #include <drm/drm_gem_cma_helper.h>
25 #include <drm/drm_gem_framebuffer_helper.h>
26 #include <drm/drm_plane_helper.h>
27 
28 #include "sprd_drm.h"
29 #include "sprd_dpu.h"
30 #include "sprd_dsi.h"
31 
32 /* Global control registers */
33 #define REG_DPU_CTRL	0x04
34 #define REG_DPU_CFG0	0x08
35 #define REG_PANEL_SIZE	0x20
36 #define REG_BLEND_SIZE	0x24
37 #define REG_BG_COLOR	0x2C
38 
39 /* Layer0 control registers */
40 #define REG_LAY_BASE_ADDR0	0x30
41 #define REG_LAY_BASE_ADDR1	0x34
42 #define REG_LAY_BASE_ADDR2	0x38
43 #define REG_LAY_CTRL		0x40
44 #define REG_LAY_SIZE		0x44
45 #define REG_LAY_PITCH		0x48
46 #define REG_LAY_POS		0x4C
47 #define REG_LAY_ALPHA		0x50
48 #define REG_LAY_CROP_START	0x5C
49 
50 /* Interrupt control registers */
51 #define REG_DPU_INT_EN		0x1E0
52 #define REG_DPU_INT_CLR		0x1E4
53 #define REG_DPU_INT_STS		0x1E8
54 
55 /* DPI control registers */
56 #define REG_DPI_CTRL		0x1F0
57 #define REG_DPI_H_TIMING	0x1F4
58 #define REG_DPI_V_TIMING	0x1F8
59 
60 /* MMU control registers */
61 #define REG_MMU_EN			0x800
62 #define REG_MMU_VPN_RANGE		0x80C
63 #define REG_MMU_PPN1			0x83C
64 #define REG_MMU_RANGE1			0x840
65 #define REG_MMU_PPN2			0x844
66 #define REG_MMU_RANGE2			0x848
67 
68 /* Global control bits */
69 #define BIT_DPU_RUN			BIT(0)
70 #define BIT_DPU_STOP			BIT(1)
71 #define BIT_DPU_REG_UPDATE		BIT(2)
72 #define BIT_DPU_IF_EDPI			BIT(0)
73 
74 /* Layer control bits */
75 #define BIT_DPU_LAY_EN				BIT(0)
76 #define BIT_DPU_LAY_LAYER_ALPHA			(0x01 << 2)
77 #define BIT_DPU_LAY_COMBO_ALPHA			(0x02 << 2)
78 #define BIT_DPU_LAY_FORMAT_YUV422_2PLANE		(0x00 << 4)
79 #define BIT_DPU_LAY_FORMAT_YUV420_2PLANE		(0x01 << 4)
80 #define BIT_DPU_LAY_FORMAT_YUV420_3PLANE		(0x02 << 4)
81 #define BIT_DPU_LAY_FORMAT_ARGB8888			(0x03 << 4)
82 #define BIT_DPU_LAY_FORMAT_RGB565			(0x04 << 4)
83 #define BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3		(0x00 << 8)
84 #define BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0		(0x01 << 8)
85 #define BIT_DPU_LAY_NO_SWITCH			(0x00 << 10)
86 #define BIT_DPU_LAY_RB_OR_UV_SWITCH		(0x01 << 10)
87 #define BIT_DPU_LAY_MODE_BLEND_NORMAL		(0x00 << 16)
88 #define BIT_DPU_LAY_MODE_BLEND_PREMULT		(0x01 << 16)
89 #define BIT_DPU_LAY_ROTATION_0		(0x00 << 20)
90 #define BIT_DPU_LAY_ROTATION_90		(0x01 << 20)
91 #define BIT_DPU_LAY_ROTATION_180	(0x02 << 20)
92 #define BIT_DPU_LAY_ROTATION_270	(0x03 << 20)
93 #define BIT_DPU_LAY_ROTATION_0_M	(0x04 << 20)
94 #define BIT_DPU_LAY_ROTATION_90_M	(0x05 << 20)
95 #define BIT_DPU_LAY_ROTATION_180_M	(0x06 << 20)
96 #define BIT_DPU_LAY_ROTATION_270_M	(0x07 << 20)
97 
98 /* Interrupt control & status bits */
99 #define BIT_DPU_INT_DONE		BIT(0)
100 #define BIT_DPU_INT_TE			BIT(1)
101 #define BIT_DPU_INT_ERR			BIT(2)
102 #define BIT_DPU_INT_UPDATE_DONE		BIT(4)
103 #define BIT_DPU_INT_VSYNC		BIT(5)
104 
105 /* DPI control bits */
106 #define BIT_DPU_EDPI_TE_EN		BIT(8)
107 #define BIT_DPU_EDPI_FROM_EXTERNAL_PAD	BIT(10)
108 #define BIT_DPU_DPI_HALT_EN		BIT(16)
109 
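/*
 * Pixel formats advertised for every DPU layer/plane. Each entry is
 * translated into the hardware format, endian and RB/UV swap bits by
 * drm_format_to_dpu().
 */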
110 static const u32 layer_fmts[] = {
111 	DRM_FORMAT_XRGB8888,
112 	DRM_FORMAT_XBGR8888,
113 	DRM_FORMAT_ARGB8888,
114 	DRM_FORMAT_ABGR8888,
115 	DRM_FORMAT_RGBA8888,
116 	DRM_FORMAT_BGRA8888,
117 	DRM_FORMAT_RGBX8888,
118 	DRM_FORMAT_RGB565,
119 	DRM_FORMAT_BGR565,
120 	DRM_FORMAT_NV12,
121 	DRM_FORMAT_NV21,
122 	DRM_FORMAT_NV16,
123 	DRM_FORMAT_NV61,
124 	DRM_FORMAT_YUV420,
125 	DRM_FORMAT_YVU420,
126 };
127 
128 struct sprd_plane {
129 	struct drm_plane base;
130 };
131 
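/*
 * Wait up to 500 ms for the stop-done interrupt to set ctx->evt_stop.
 * Returns 0 on success (or if the DPU is already stopped), -ETIMEDOUT
 * otherwise.
 */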
132 static int dpu_wait_stop_done(struct sprd_dpu *dpu)
133 {
134 	struct dpu_context *ctx = &dpu->ctx;
135 	int rc;
136 
137 	if (ctx->stopped)
138 		return 0;
139 
140 	rc = wait_event_interruptible_timeout(ctx->wait_queue, ctx->evt_stop,
141 					      msecs_to_jiffies(500));
142 	ctx->evt_stop = false;
143 
144 	ctx->stopped = true;
145 
146 	if (!rc) {
147 		drm_err(dpu->drm, "dpu wait for stop done timed out!\n");
148 		return -ETIMEDOUT;
149 	}
150 
151 	return 0;
152 }
153 
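/*
 * Wait up to 500 ms for the register-update-done interrupt to set
 * ctx->evt_update after a shadow register update has been triggered.
 */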
154 static int dpu_wait_update_done(struct sprd_dpu *dpu)
155 {
156 	struct dpu_context *ctx = &dpu->ctx;
157 	int rc;
158 
159 	ctx->evt_update = false;
160 
161 	rc = wait_event_interruptible_timeout(ctx->wait_queue, ctx->evt_update,
162 					      msecs_to_jiffies(500));
163 
164 	if (!rc) {
165 		drm_err(dpu->drm, "dpu wait for reg update done timed out!\n");
166 		return -ETIMEDOUT;
167 	}
168 
169 	return 0;
170 }
171 
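/*
 * Translate a DRM fourcc into the DPU layer format, data endian and
 * RB/UV swap control bits. Unsupported formats return 0, which the
 * plane atomic_check treats as an error.
 */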
172 static u32 drm_format_to_dpu(struct drm_framebuffer *fb)
173 {
174 	u32 format = 0;
175 
176 	switch (fb->format->format) {
177 	case DRM_FORMAT_BGRA8888:
178 		/* BGRA8888 -> ARGB8888 */
179 		format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
180 		format |= BIT_DPU_LAY_FORMAT_ARGB8888;
181 		break;
182 	case DRM_FORMAT_RGBX8888:
183 	case DRM_FORMAT_RGBA8888:
184 		/* RGBA8888 -> ABGR8888 */
185 		format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
186 		fallthrough;
187 	case DRM_FORMAT_ABGR8888:
188 		/* RB switch */
189 		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
190 		fallthrough;
191 	case DRM_FORMAT_ARGB8888:
192 		format |= BIT_DPU_LAY_FORMAT_ARGB8888;
193 		break;
194 	case DRM_FORMAT_XBGR8888:
195 		/* RB switch */
196 		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
197 		fallthrough;
198 	case DRM_FORMAT_XRGB8888:
199 		format |= BIT_DPU_LAY_FORMAT_ARGB8888;
200 		break;
201 	case DRM_FORMAT_BGR565:
202 		/* RB switch */
203 		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
204 		fallthrough;
205 	case DRM_FORMAT_RGB565:
206 		format |= BIT_DPU_LAY_FORMAT_RGB565;
207 		break;
208 	case DRM_FORMAT_NV12:
209 		/* 2-plane: YUV420 */
210 		format |= BIT_DPU_LAY_FORMAT_YUV420_2PLANE;
211 		/* Y endian */
212 		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
213 		/* UV endian */
214 		format |= BIT_DPU_LAY_NO_SWITCH;
215 		break;
216 	case DRM_FORMAT_NV21:
217 		/* 2-plane: YUV420 */
218 		format |= BIT_DPU_LAY_FORMAT_YUV420_2PLANE;
219 		/* Y endian */
220 		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
221 		/* UV endian */
222 		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
223 		break;
224 	case DRM_FORMAT_NV16:
225 		/* 2-plane: YUV422 */
226 		format |= BIT_DPU_LAY_FORMAT_YUV422_2PLANE;
227 		/* Y endian */
228 		format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
229 		/* UV endian */
230 		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
231 		break;
232 	case DRM_FORMAT_NV61:
233 		/* 2-plane: YUV422 */
234 		format |= BIT_DPU_LAY_FORMAT_YUV422_2PLANE;
235 		/* Y endian */
236 		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
237 		/* UV endian */
238 		format |= BIT_DPU_LAY_NO_SWITCH;
239 		break;
240 	case DRM_FORMAT_YUV420:
241 		format |= BIT_DPU_LAY_FORMAT_YUV420_3PLANE;
242 		/* Y endian */
243 		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
244 		/* UV endian */
245 		format |= BIT_DPU_LAY_NO_SWITCH;
246 		break;
247 	case DRM_FORMAT_YVU420:
248 		format |= BIT_DPU_LAY_FORMAT_YUV420_3PLANE;
249 		/* Y endian */
250 		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
251 		/* UV endian */
252 		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
253 		break;
254 	default:
255 		break;
256 	}
257 
258 	return format;
259 }
260 
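/*
 * Map the plane rotation/reflection state onto the DPU layer rotation
 * field; unhandled combinations fall back to ROTATION_0.
 */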
261 static u32 drm_rotation_to_dpu(struct drm_plane_state *state)
262 {
263 	u32 rotation = 0;
264 
265 	switch (state->rotation) {
266 	default:
267 	case DRM_MODE_ROTATE_0:
268 		rotation = BIT_DPU_LAY_ROTATION_0;
269 		break;
270 	case DRM_MODE_ROTATE_90:
271 		rotation = BIT_DPU_LAY_ROTATION_90;
272 		break;
273 	case DRM_MODE_ROTATE_180:
274 		rotation = BIT_DPU_LAY_ROTATION_180;
275 		break;
276 	case DRM_MODE_ROTATE_270:
277 		rotation = BIT_DPU_LAY_ROTATION_270;
278 		break;
279 	case DRM_MODE_REFLECT_Y:
280 		rotation = BIT_DPU_LAY_ROTATION_180_M;
281 		break;
282 	case (DRM_MODE_REFLECT_Y | DRM_MODE_ROTATE_90):
283 		rotation = BIT_DPU_LAY_ROTATION_90_M;
284 		break;
285 	case DRM_MODE_REFLECT_X:
286 		rotation = BIT_DPU_LAY_ROTATION_0_M;
287 		break;
288 	case (DRM_MODE_REFLECT_X | DRM_MODE_ROTATE_90):
289 		rotation = BIT_DPU_LAY_ROTATION_270_M;
290 		break;
291 	}
292 
293 	return rotation;
294 }
295 
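/*
 * Map the plane pixel blend mode onto the DPU alpha-mode and blend-mode
 * bits; DRM_MODE_BLEND_PIXEL_NONE and unknown modes fall back to plain
 * layer alpha.
 */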
296 static u32 drm_blend_to_dpu(struct drm_plane_state *state)
297 {
298 	u32 blend = 0;
299 
300 	switch (state->pixel_blend_mode) {
301 	case DRM_MODE_BLEND_COVERAGE:
302 		/* alpha mode select - combo alpha */
303 		blend |= BIT_DPU_LAY_COMBO_ALPHA;
304 		/* Normal mode */
305 		blend |= BIT_DPU_LAY_MODE_BLEND_NORMAL;
306 		break;
307 	case DRM_MODE_BLEND_PREMULTI:
308 		/* alpha mode select - combo alpha */
309 		blend |= BIT_DPU_LAY_COMBO_ALPHA;
310 		/* Pre-mult mode */
311 		blend |= BIT_DPU_LAY_MODE_BLEND_PREMULT;
312 		break;
313 	case DRM_MODE_BLEND_PIXEL_NONE:
314 	default:
315 		/* no per-pixel blending, e.g. for RGBX formats */
316 		/* alpha mode select - layer alpha */
317 		blend |= BIT_DPU_LAY_LAYER_ALPHA;
318 		break;
319 	}
320 
321 	return blend;
322 }
323 
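/*
 * Program one hardware layer from the plane state: the scanout address
 * of every framebuffer plane, position, size, crop, alpha and pitch,
 * then enable the layer with its format/blend/rotation bits in
 * REG_LAY_CTRL.
 */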
324 static void sprd_dpu_layer(struct sprd_dpu *dpu, struct drm_plane_state *state)
325 {
326 	struct dpu_context *ctx = &dpu->ctx;
327 	struct drm_gem_cma_object *cma_obj;
328 	struct drm_framebuffer *fb = state->fb;
329 	u32 addr, size, offset, pitch, blend, format, rotation;
330 	u32 src_x = state->src_x >> 16;
331 	u32 src_y = state->src_y >> 16;
332 	u32 src_w = state->src_w >> 16;
333 	u32 src_h = state->src_h >> 16;
334 	u32 dst_x = state->crtc_x;
335 	u32 dst_y = state->crtc_y;
336 	u32 alpha = state->alpha;
337 	u32 index = state->zpos;
338 	int i;
339 
340 	offset = (dst_x & 0xffff) | (dst_y << 16);
341 	size = (src_w & 0xffff) | (src_h << 16);
342 
343 	for (i = 0; i < fb->format->num_planes; i++) {
344 		cma_obj = drm_fb_cma_get_gem_obj(fb, i);
345 		addr = cma_obj->paddr + fb->offsets[i];
346 
347 		if (i == 0)
348 			layer_reg_wr(ctx, REG_LAY_BASE_ADDR0, addr, index);
349 		else if (i == 1)
350 			layer_reg_wr(ctx, REG_LAY_BASE_ADDR1, addr, index);
351 		else
352 			layer_reg_wr(ctx, REG_LAY_BASE_ADDR2, addr, index);
353 	}
354 
355 	if (fb->format->num_planes == 3) {
356 		/* UV pitch is 1/2 of Y pitch */
357 		pitch = (fb->pitches[0] / fb->format->cpp[0]) |
358 				(fb->pitches[0] / fb->format->cpp[0] << 15);
359 	} else {
360 		pitch = fb->pitches[0] / fb->format->cpp[0];
361 	}
362 
363 	layer_reg_wr(ctx, REG_LAY_POS, offset, index);
364 	layer_reg_wr(ctx, REG_LAY_SIZE, size, index);
365 	layer_reg_wr(ctx, REG_LAY_CROP_START,
366 		     src_y << 16 | src_x, index);
367 	layer_reg_wr(ctx, REG_LAY_ALPHA, alpha, index);
368 	layer_reg_wr(ctx, REG_LAY_PITCH, pitch, index);
369 
370 	format = drm_format_to_dpu(fb);
371 	blend = drm_blend_to_dpu(state);
372 	rotation = drm_rotation_to_dpu(state);
373 
374 	layer_reg_wr(ctx, REG_LAY_CTRL, BIT_DPU_LAY_EN |
375 				format |
376 				blend |
377 				rotation,
378 				index);
379 }
380 
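/*
 * Commit the current layer configuration. In DPI mode this triggers a
 * shadow register update (when the DPU is running) and waits for it to
 * complete; in EDPI mode the DPU is restarted so the new configuration
 * is latched on the rising edge of DPU_RUN.
 */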
381 static void sprd_dpu_flip(struct sprd_dpu *dpu)
382 {
383 	struct dpu_context *ctx = &dpu->ctx;
384 
385 	/*
386 	 * Make sure the dpu is stopped. The DPU has no shadow
387 	 * registers in EDPI mode, so the config registers can only be
388 	 * updated on the rising edge of the DPU_RUN bit.
389 	 */
390 	if (ctx->if_type == SPRD_DPU_IF_EDPI)
391 		dpu_wait_stop_done(dpu);
392 
393 	/* update trigger and wait */
394 	if (ctx->if_type == SPRD_DPU_IF_DPI) {
395 		if (!ctx->stopped) {
396 			dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_REG_UPDATE);
397 			dpu_wait_update_done(dpu);
398 		}
399 
400 		dpu_reg_set(ctx, REG_DPU_INT_EN, BIT_DPU_INT_ERR);
401 	} else if (ctx->if_type == SPRD_DPU_IF_EDPI) {
402 		dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_RUN);
403 
404 		ctx->stopped = false;
405 	}
406 }
407 
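/*
 * One-time controller setup: clear the background color, program the
 * MMU-related registers, select the DPI or EDPI interface and unmask
 * the interrupts that the chosen interface type needs.
 */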
408 static void sprd_dpu_init(struct sprd_dpu *dpu)
409 {
410 	struct dpu_context *ctx = &dpu->ctx;
411 	u32 int_mask = 0;
412 
413 	writel(0x00, ctx->base + REG_BG_COLOR);
414 	writel(0x00, ctx->base + REG_MMU_EN);
415 	writel(0x00, ctx->base + REG_MMU_PPN1);
416 	writel(0xffff, ctx->base + REG_MMU_RANGE1);
417 	writel(0x00, ctx->base + REG_MMU_PPN2);
418 	writel(0xffff, ctx->base + REG_MMU_RANGE2);
419 	writel(0x1ffff, ctx->base + REG_MMU_VPN_RANGE);
420 
421 	if (ctx->if_type == SPRD_DPU_IF_DPI) {
422 		/* use dpi as interface */
423 		dpu_reg_clr(ctx, REG_DPU_CFG0, BIT_DPU_IF_EDPI);
424 		/* disable Halt function for SPRD DSI */
425 		dpu_reg_clr(ctx, REG_DPI_CTRL, BIT_DPU_DPI_HALT_EN);
426 		/* select te from external pad */
427 		dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_FROM_EXTERNAL_PAD);
428 
429 		/* enable dpu update done INT */
430 		int_mask |= BIT_DPU_INT_UPDATE_DONE;
431 		/* enable dpu done INT */
432 		int_mask |= BIT_DPU_INT_DONE;
433 		/* enable dpu dpi vsync */
434 		int_mask |= BIT_DPU_INT_VSYNC;
435 		/* enable dpu TE INT */
436 		int_mask |= BIT_DPU_INT_TE;
437 		/* enable underflow err INT */
438 		int_mask |= BIT_DPU_INT_ERR;
439 	} else if (ctx->if_type == SPRD_DPU_IF_EDPI) {
440 		/* use edpi as interface */
441 		dpu_reg_set(ctx, REG_DPU_CFG0, BIT_DPU_IF_EDPI);
442 		/* use external te */
443 		dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_FROM_EXTERNAL_PAD);
444 		/* enable te */
445 		dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_TE_EN);
446 
447 		/* enable stop done INT */
448 		int_mask |= BIT_DPU_INT_DONE;
449 		/* enable TE INT */
450 		int_mask |= BIT_DPU_INT_TE;
451 	}
452 
453 	writel(int_mask, ctx->base + REG_DPU_INT_EN);
454 }
455 
456 static void sprd_dpu_fini(struct sprd_dpu *dpu)
457 {
458 	struct dpu_context *ctx = &dpu->ctx;
459 
460 	writel(0x00, ctx->base + REG_DPU_INT_EN);
461 	writel(0xff, ctx->base + REG_DPU_INT_CLR);
462 }
463 
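/*
 * Program the panel and blend sizes and, for the DPI interface, the
 * horizontal/vertical timing registers from the videomode captured in
 * sprd_crtc_mode_set_nofb().
 */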
464 static void sprd_dpi_init(struct sprd_dpu *dpu)
465 {
466 	struct dpu_context *ctx = &dpu->ctx;
467 	u32 reg_val;
468 	u32 size;
469 
470 	size = (ctx->vm.vactive << 16) | ctx->vm.hactive;
471 	writel(size, ctx->base + REG_PANEL_SIZE);
472 	writel(size, ctx->base + REG_BLEND_SIZE);
473 
474 	if (ctx->if_type == SPRD_DPU_IF_DPI) {
475 		/* set dpi timing */
476 		reg_val = ctx->vm.hsync_len << 0 |
477 			  ctx->vm.hback_porch << 8 |
478 			  ctx->vm.hfront_porch << 20;
479 		writel(reg_val, ctx->base + REG_DPI_H_TIMING);
480 
481 		reg_val = ctx->vm.vsync_len << 0 |
482 			  ctx->vm.vback_porch << 8 |
483 			  ctx->vm.vfront_porch << 20;
484 		writel(reg_val, ctx->base + REG_DPI_V_TIMING);
485 	}
486 }
487 
488 void sprd_dpu_run(struct sprd_dpu *dpu)
489 {
490 	struct dpu_context *ctx = &dpu->ctx;
491 
492 	dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_RUN);
493 
494 	ctx->stopped = false;
495 }
496 
497 void sprd_dpu_stop(struct sprd_dpu *dpu)
498 {
499 	struct dpu_context *ctx = &dpu->ctx;
500 
501 	if (ctx->if_type == SPRD_DPU_IF_DPI)
502 		dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_STOP);
503 
504 	dpu_wait_stop_done(dpu);
505 }
506 
507 static int sprd_plane_atomic_check(struct drm_plane *plane,
508 				   struct drm_atomic_state *state)
509 {
510 	struct drm_plane_state *plane_state = drm_atomic_get_new_plane_state(state,
511 									     plane);
512 	struct drm_crtc_state *crtc_state;
513 	u32 fmt;
514 
515 	if (!plane_state->fb || !plane_state->crtc)
516 		return 0;
517 
518 	fmt = drm_format_to_dpu(plane_state->fb);
519 	if (!fmt)
520 		return -EINVAL;
521 
522 	crtc_state = drm_atomic_get_crtc_state(plane_state->state, plane_state->crtc);
523 	if (IS_ERR(crtc_state))
524 		return PTR_ERR(crtc_state);
525 
526 	return drm_atomic_helper_check_plane_state(plane_state, crtc_state,
527 						  DRM_PLANE_HELPER_NO_SCALING,
528 						  DRM_PLANE_HELPER_NO_SCALING,
529 						  true, true);
530 }
531 
532 static void sprd_plane_atomic_update(struct drm_plane *drm_plane,
533 				     struct drm_atomic_state *state)
534 {
535 	struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state,
536 									   drm_plane);
537 	struct sprd_dpu *dpu = to_sprd_crtc(new_state->crtc);
538 
539 	/* start configuring dpu layers */
540 	sprd_dpu_layer(dpu, new_state);
541 }
542 
543 static void sprd_plane_atomic_disable(struct drm_plane *drm_plane,
544 				      struct drm_atomic_state *state)
545 {
546 	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state,
547 									   drm_plane);
548 	struct sprd_dpu *dpu = to_sprd_crtc(old_state->crtc);
549 
550 	layer_reg_wr(&dpu->ctx, REG_LAY_CTRL, 0x00, old_state->zpos);
551 }
552 
553 static void sprd_plane_create_properties(struct sprd_plane *plane, int index)
554 {
555 	unsigned int supported_modes = BIT(DRM_MODE_BLEND_PIXEL_NONE) |
556 				       BIT(DRM_MODE_BLEND_PREMULTI) |
557 				       BIT(DRM_MODE_BLEND_COVERAGE);
558 
559 	/* create rotation property */
560 	drm_plane_create_rotation_property(&plane->base,
561 					   DRM_MODE_ROTATE_0,
562 					   DRM_MODE_ROTATE_MASK |
563 					   DRM_MODE_REFLECT_MASK);
564 
565 	/* create alpha property */
566 	drm_plane_create_alpha_property(&plane->base);
567 
568 	/* create blend mode property */
569 	drm_plane_create_blend_mode_property(&plane->base, supported_modes);
570 
571 	/* create zpos property */
572 	drm_plane_create_zpos_immutable_property(&plane->base, index);
573 }
574 
575 static const struct drm_plane_helper_funcs sprd_plane_helper_funcs = {
576 	.atomic_check = sprd_plane_atomic_check,
577 	.atomic_update = sprd_plane_atomic_update,
578 	.atomic_disable = sprd_plane_atomic_disable,
579 };
580 
581 static const struct drm_plane_funcs sprd_plane_funcs = {
582 	.update_plane = drm_atomic_helper_update_plane,
583 	.disable_plane	= drm_atomic_helper_disable_plane,
584 	.destroy = drm_plane_cleanup,
585 	.reset = drm_atomic_helper_plane_reset,
586 	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
587 	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
588 };
589 
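/*
 * Create the six DPU layers as DRM planes: layer 0 becomes the primary
 * plane, the others are overlays, each with an immutable zpos equal to
 * its layer index. The primary plane is returned for CRTC creation.
 */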
590 static struct sprd_plane *sprd_planes_init(struct drm_device *drm)
591 {
592 	struct sprd_plane *plane, *primary;
593 	enum drm_plane_type plane_type;
594 	int i;
595 
596 	for (i = 0; i < 6; i++) {
597 		plane_type = (i == 0) ? DRM_PLANE_TYPE_PRIMARY :
598 					DRM_PLANE_TYPE_OVERLAY;
599 
600 		plane = drmm_universal_plane_alloc(drm, struct sprd_plane, base,
601 						   1, &sprd_plane_funcs,
602 						   layer_fmts, ARRAY_SIZE(layer_fmts),
603 						   NULL, plane_type, NULL);
604 		if (IS_ERR(plane)) {
605 			drm_err(drm, "failed to init drm plane: %d\n", i);
606 			return plane;
607 		}
608 
609 		drm_plane_helper_add(&plane->base, &sprd_plane_helper_funcs);
610 
611 		sprd_plane_create_properties(plane, i);
612 
613 		if (i == 0)
614 			primary = plane;
615 	}
616 
617 	return primary;
618 }
619 
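/*
 * Capture the adjusted mode as a videomode and select the DPU interface:
 * DPI for a video mode DSI peripheral, EDPI (command mode) otherwise.
 */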
620 static void sprd_crtc_mode_set_nofb(struct drm_crtc *crtc)
621 {
622 	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
623 	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
624 	struct drm_encoder *encoder;
625 	struct sprd_dsi *dsi;
626 
627 	drm_display_mode_to_videomode(mode, &dpu->ctx.vm);
628 
629 	drm_for_each_encoder_mask(encoder, crtc->dev,
630 				  crtc->state->encoder_mask) {
631 		dsi = encoder_to_dsi(encoder);
632 
633 		if (dsi->slave->mode_flags & MIPI_DSI_MODE_VIDEO)
634 			dpu->ctx.if_type = SPRD_DPU_IF_DPI;
635 		else
636 			dpu->ctx.if_type = SPRD_DPU_IF_EDPI;
637 	}
638 
639 	sprd_dpi_init(dpu);
640 }
641 
642 static void sprd_crtc_atomic_enable(struct drm_crtc *crtc,
643 				    struct drm_atomic_state *state)
644 {
645 	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
646 
647 	sprd_dpu_init(dpu);
648 
649 	drm_crtc_vblank_on(&dpu->base);
650 }
651 
652 static void sprd_crtc_atomic_disable(struct drm_crtc *crtc,
653 				     struct drm_atomic_state *state)
654 {
655 	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
656 	struct drm_device *drm = dpu->base.dev;
657 
658 	drm_crtc_vblank_off(&dpu->base);
659 
660 	sprd_dpu_fini(dpu);
661 
662 	spin_lock_irq(&drm->event_lock);
663 	if (crtc->state->event) {
664 		drm_crtc_send_vblank_event(crtc, crtc->state->event);
665 		crtc->state->event = NULL;
666 	}
667 	spin_unlock_irq(&drm->event_lock);
668 }
669 
670 static void sprd_crtc_atomic_flush(struct drm_crtc *crtc,
671 				   struct drm_atomic_state *state)
672 
673 {
674 	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
675 	struct drm_device *drm = dpu->base.dev;
676 
677 	sprd_dpu_flip(dpu);
678 
679 	spin_lock_irq(&drm->event_lock);
680 	if (crtc->state->event) {
681 		drm_crtc_send_vblank_event(crtc, crtc->state->event);
682 		crtc->state->event = NULL;
683 	}
684 	spin_unlock_irq(&drm->event_lock);
685 }
686 
687 static int sprd_crtc_enable_vblank(struct drm_crtc *crtc)
688 {
689 	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
690 
691 	dpu_reg_set(&dpu->ctx, REG_DPU_INT_EN, BIT_DPU_INT_VSYNC);
692 
693 	return 0;
694 }
695 
696 static void sprd_crtc_disable_vblank(struct drm_crtc *crtc)
697 {
698 	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
699 
700 	dpu_reg_clr(&dpu->ctx, REG_DPU_INT_EN, BIT_DPU_INT_VSYNC);
701 }
702 
703 static const struct drm_crtc_helper_funcs sprd_crtc_helper_funcs = {
704 	.mode_set_nofb	= sprd_crtc_mode_set_nofb,
705 	.atomic_flush	= sprd_crtc_atomic_flush,
706 	.atomic_enable	= sprd_crtc_atomic_enable,
707 	.atomic_disable	= sprd_crtc_atomic_disable,
708 };
709 
710 static const struct drm_crtc_funcs sprd_crtc_funcs = {
711 	.destroy	= drm_crtc_cleanup,
712 	.set_config	= drm_atomic_helper_set_config,
713 	.page_flip	= drm_atomic_helper_page_flip,
714 	.reset		= drm_atomic_helper_crtc_reset,
715 	.atomic_duplicate_state	= drm_atomic_helper_crtc_duplicate_state,
716 	.atomic_destroy_state	= drm_atomic_helper_crtc_destroy_state,
717 	.enable_vblank	= sprd_crtc_enable_vblank,
718 	.disable_vblank	= sprd_crtc_disable_vblank,
719 };
720 
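/*
 * Allocate the CRTC with the given primary plane and set dpu->base.port
 * from the OF graph so that drm_of_find_possible_crtcs() can find this
 * CRTC.
 */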
721 static struct sprd_dpu *sprd_crtc_init(struct drm_device *drm,
722 				       struct drm_plane *primary, struct device *dev)
723 {
724 	struct device_node *port;
725 	struct sprd_dpu *dpu;
726 
727 	dpu = drmm_crtc_alloc_with_planes(drm, struct sprd_dpu, base,
728 					  primary, NULL,
729 					&sprd_crtc_funcs, NULL);
730 	if (IS_ERR(dpu)) {
731 		drm_err(drm, "failed to init crtc\n");
732 		return dpu;
733 	}
734 	drm_crtc_helper_add(&dpu->base, &sprd_crtc_helper_funcs);
735 
736 	/*
737 	 * Set the crtc port so that drm_of_find_possible_crtcs() works.
738 	 */
739 	port = of_graph_get_port_by_id(dev->of_node, 0);
740 	if (!port) {
741 		drm_err(drm, "failed to find crtc output port for %s\n",
742 			dev->of_node->full_name);
743 		return ERR_PTR(-EINVAL);
744 	}
745 	dpu->base.port = port;
746 	of_node_put(port);
747 
748 	return dpu;
749 }
750 
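/*
 * DPU interrupt handler: masks the error interrupt on underflow, wakes
 * up waiters on update-done and stop-done, forwards vsync to the DRM
 * vblank machinery and clears the handled status bits.
 */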
751 static irqreturn_t sprd_dpu_isr(int irq, void *data)
752 {
753 	struct sprd_dpu *dpu = data;
754 	struct dpu_context *ctx = &dpu->ctx;
755 	u32 reg_val, int_mask = 0;
756 
757 	reg_val = readl(ctx->base + REG_DPU_INT_STS);
758 
759 	/* disable err interrupt */
760 	if (reg_val & BIT_DPU_INT_ERR) {
761 		int_mask |= BIT_DPU_INT_ERR;
762 		drm_warn(dpu->drm, "Warning: dpu underflow!\n");
763 	}
764 
765 	/* dpu update done isr */
766 	if (reg_val & BIT_DPU_INT_UPDATE_DONE) {
767 		ctx->evt_update = true;
768 		wake_up_interruptible_all(&ctx->wait_queue);
769 	}
770 
771 	/* dpu stop done isr */
772 	if (reg_val & BIT_DPU_INT_DONE) {
773 		ctx->evt_stop = true;
774 		wake_up_interruptible_all(&ctx->wait_queue);
775 	}
776 
777 	if (reg_val & BIT_DPU_INT_VSYNC)
778 		drm_crtc_handle_vblank(&dpu->base);
779 
780 	writel(reg_val, ctx->base + REG_DPU_INT_CLR);
781 	dpu_reg_clr(ctx, REG_DPU_INT_EN, int_mask);
782 
783 	return IRQ_HANDLED;
784 }
785 
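/*
 * Map the DPU register space, quiesce and clear any pending interrupts,
 * install the IRQ handler and initialize the wait queue used by the
 * stop/update helpers.
 */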
786 static int sprd_dpu_context_init(struct sprd_dpu *dpu,
787 				 struct device *dev)
788 {
789 	struct platform_device *pdev = to_platform_device(dev);
790 	struct dpu_context *ctx = &dpu->ctx;
791 	struct resource *res;
792 	int ret;
793 
794 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
795 	if (!res) {
796 		dev_err(dev, "failed to get I/O resource\n");
797 		return -EINVAL;
798 	}
799 
800 	ctx->base = devm_ioremap(dev, res->start, resource_size(res));
801 	if (!ctx->base) {
802 		dev_err(dev, "failed to map dpu registers\n");
803 		return -EFAULT;
804 	}
805 
806 	ctx->irq = platform_get_irq(pdev, 0);
807 	if (ctx->irq < 0) {
808 		dev_err(dev, "failed to get dpu irq\n");
809 		return ctx->irq;
810 	}
811 
812 	/* disable and clear interrupts before registering the dpu IRQ */
813 	writel(0x00, ctx->base + REG_DPU_INT_EN);
814 	writel(0xff, ctx->base + REG_DPU_INT_CLR);
815 
816 	ret = devm_request_irq(dev, ctx->irq, sprd_dpu_isr,
817 			       IRQF_TRIGGER_NONE, "DPU", dpu);
818 	if (ret) {
819 		dev_err(dev, "failed to register dpu irq handler\n");
820 		return ret;
821 	}
822 
823 	init_waitqueue_head(&ctx->wait_queue);
824 
825 	return 0;
826 }
827 
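/*
 * Component bind callback: create the planes and the CRTC, then set up
 * the register mapping and IRQ via sprd_dpu_context_init().
 */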
828 static int sprd_dpu_bind(struct device *dev, struct device *master, void *data)
829 {
830 	struct drm_device *drm = data;
831 	struct sprd_dpu *dpu;
832 	struct sprd_plane *plane;
833 	int ret;
834 
835 	plane = sprd_planes_init(drm);
836 	if (IS_ERR(plane))
837 		return PTR_ERR(plane);
838 
839 	dpu = sprd_crtc_init(drm, &plane->base, dev);
840 	if (IS_ERR(dpu))
841 		return PTR_ERR(dpu);
842 
843 	dpu->drm = drm;
844 	dev_set_drvdata(dev, dpu);
845 
846 	ret = sprd_dpu_context_init(dpu, dev);
847 	if (ret)
848 		return ret;
849 
850 	return 0;
851 }
852 
853 static const struct component_ops dpu_component_ops = {
854 	.bind = sprd_dpu_bind,
855 };
856 
857 static const struct of_device_id dpu_match_table[] = {
858 	{ .compatible = "sprd,sharkl3-dpu" },
859 	{ /* sentinel */ },
860 };
861 MODULE_DEVICE_TABLE(of, dpu_match_table);
862 
863 static int sprd_dpu_probe(struct platform_device *pdev)
864 {
865 	return component_add(&pdev->dev, &dpu_component_ops);
866 }
867 
868 static int sprd_dpu_remove(struct platform_device *pdev)
869 {
870 	component_del(&pdev->dev, &dpu_component_ops);
871 
872 	return 0;
873 }
874 
875 struct platform_driver sprd_dpu_driver = {
876 	.probe = sprd_dpu_probe,
877 	.remove = sprd_dpu_remove,
878 	.driver = {
879 		.name = "sprd-dpu-drv",
880 		.of_match_table = dpu_match_table,
881 	},
882 };
883 
884 MODULE_AUTHOR("Leon He <leon.he@unisoc.com>");
885 MODULE_AUTHOR("Kevin Tang <kevin.tang@unisoc.com>");
886 MODULE_DESCRIPTION("Unisoc Display Controller Driver");
887 MODULE_LICENSE("GPL v2");
888