xref: /linux/drivers/gpu/drm/xlnx/zynqmp_kms.c (revision 7f71507851fc7764b36a3221839607d3a45c2025)
// SPDX-License-Identifier: GPL-2.0
/*
 * ZynqMP DisplayPort Subsystem - KMS API
 *
 * Copyright (C) 2017 - 2021 Xilinx, Inc.
 *
 * Authors:
 * - Hyun Woo Kwon <hyun.kwon@xilinx.com>
 * - Laurent Pinchart <laurent.pinchart@ideasonboard.com>
 */

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_blend.h>
#include <drm/drm_bridge.h>
#include <drm/drm_bridge_connector.h>
#include <drm/drm_client_setup.h>
#include <drm/drm_connector.h>
#include <drm/drm_crtc.h>
#include <drm/drm_device.h>
#include <drm/drm_drv.h>
#include <drm/drm_encoder.h>
#include <drm/drm_fbdev_dma.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_dma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_managed.h>
#include <drm/drm_mode_config.h>
#include <drm/drm_plane.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_simple_kms_helper.h>
#include <drm/drm_vblank.h>

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/pm_runtime.h>
#include <linux/spinlock.h>

#include "zynqmp_disp.h"
#include "zynqmp_dp.h"
#include "zynqmp_dpsub.h"
#include "zynqmp_kms.h"

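/*
 * The drm_device is embedded in the zynqmp_dpsub_drm wrapper as its dev
 * member, so the DRM device pointer can be converted back with container_of()
 * to reach the zynqmp_dpsub instance.
 */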
static inline struct zynqmp_dpsub *to_zynqmp_dpsub(struct drm_device *drm)
{
	return container_of(drm, struct zynqmp_dpsub_drm, dev)->dpsub;
}

/* -----------------------------------------------------------------------------
 * DRM Planes
 */

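/*
 * The DPSUB layers can't scale, so the plane state is validated against the
 * CRTC with both scaling factors locked to DRM_PLANE_NO_SCALING.
 */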
static int zynqmp_dpsub_plane_atomic_check(struct drm_plane *plane,
					   struct drm_atomic_state *state)
{
	struct drm_plane_state *new_plane_state = drm_atomic_get_new_plane_state(state,
										 plane);
	struct drm_crtc_state *crtc_state;

	if (!new_plane_state->crtc)
		return 0;

	crtc_state = drm_atomic_get_crtc_state(state, new_plane_state->crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	return drm_atomic_helper_check_plane_state(new_plane_state,
						   crtc_state,
						   DRM_PLANE_NO_SCALING,
						   DRM_PLANE_NO_SCALING,
						   false, false);
}

static void zynqmp_dpsub_plane_atomic_disable(struct drm_plane *plane,
					      struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state,
									   plane);
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(plane->dev);
	struct zynqmp_disp_layer *layer = dpsub->layers[plane->index];

	if (!old_state->fb)
		return;

	zynqmp_disp_layer_disable(layer);

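	/*
	 * The DRM alpha property is 16-bit while the blender takes an 8-bit
	 * global alpha value, hence the shift.
	 */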
	if (plane->index == ZYNQMP_DPSUB_LAYER_GFX)
		zynqmp_disp_blend_set_global_alpha(dpsub->disp, false,
						   plane->state->alpha >> 8);
}

static void zynqmp_dpsub_plane_atomic_update(struct drm_plane *plane,
					     struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state, plane);
	struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state, plane);
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(plane->dev);
	struct zynqmp_disp_layer *layer = dpsub->layers[plane->index];
	bool format_changed = false;

	if (!old_state->fb ||
	    old_state->fb->format->format != new_state->fb->format->format)
		format_changed = true;

	/*
	 * If the format has changed (including going from a previously
	 * disabled state to any format), reconfigure the format. Disable the
	 * plane first if needed.
	 */
	if (format_changed) {
		if (old_state->fb)
			zynqmp_disp_layer_disable(layer);

		zynqmp_disp_layer_set_format(layer, new_state->fb->format);
	}

	zynqmp_disp_layer_update(layer, new_state);

	if (plane->index == ZYNQMP_DPSUB_LAYER_GFX)
		zynqmp_disp_blend_set_global_alpha(dpsub->disp, true,
						   plane->state->alpha >> 8);

	/*
	 * Unconditionally enable the layer, as it may have been disabled
	 * previously, either explicitly to reconfigure the layer format or
	 * implicitly after a DPSUB reset during a display mode change. The
	 * DRM framework calls this callback for enabled planes only.
	 */
	zynqmp_disp_layer_enable(layer);
}

static const struct drm_plane_helper_funcs zynqmp_dpsub_plane_helper_funcs = {
	.atomic_check		= zynqmp_dpsub_plane_atomic_check,
	.atomic_update		= zynqmp_dpsub_plane_atomic_update,
	.atomic_disable		= zynqmp_dpsub_plane_atomic_disable,
};

static const struct drm_plane_funcs zynqmp_dpsub_plane_funcs = {
	.update_plane		= drm_atomic_helper_update_plane,
	.disable_plane		= drm_atomic_helper_disable_plane,
	.destroy		= drm_plane_cleanup,
	.reset			= drm_atomic_helper_plane_reset,
	.atomic_duplicate_state	= drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_plane_destroy_state,
};

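/*
 * zynqmp_disp_layer_drm_formats() is expected to return a dynamically
 * allocated format array owned by the caller, hence the kfree() once the
 * plane has been initialized.
 */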
static int zynqmp_dpsub_create_planes(struct zynqmp_dpsub *dpsub)
{
	unsigned int i;
	int ret;

	for (i = 0; i < ARRAY_SIZE(dpsub->drm->planes); i++) {
		struct zynqmp_disp_layer *layer = dpsub->layers[i];
		struct drm_plane *plane = &dpsub->drm->planes[i];
		enum drm_plane_type type;
		unsigned int num_formats;
		u32 *formats;

		formats = zynqmp_disp_layer_drm_formats(layer, &num_formats);
		if (!formats)
			return -ENOMEM;

		/* Graphics layer is primary, and video layer is overlay. */
		type = i == ZYNQMP_DPSUB_LAYER_VID
		     ? DRM_PLANE_TYPE_OVERLAY : DRM_PLANE_TYPE_PRIMARY;
		ret = drm_universal_plane_init(&dpsub->drm->dev, plane, 0,
					       &zynqmp_dpsub_plane_funcs,
					       formats, num_formats,
					       NULL, type, NULL);
		kfree(formats);
		if (ret)
			return ret;

		drm_plane_helper_add(plane, &zynqmp_dpsub_plane_helper_funcs);

		drm_plane_create_zpos_immutable_property(plane, i);
		if (i == ZYNQMP_DPSUB_LAYER_GFX)
			drm_plane_create_alpha_property(plane);
	}

	return 0;
}

/* -----------------------------------------------------------------------------
 * DRM CRTC
 */

static inline struct zynqmp_dpsub *crtc_to_dpsub(struct drm_crtc *crtc)
{
	return container_of(crtc, struct zynqmp_dpsub_drm, crtc)->dpsub;
}

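/*
 * Powering up the pipeline requires the power domain to be active and the
 * pixel clock to run before the display is enabled, hence the ordering below:
 * runtime PM get, clock setup and enable, then display enable.
 */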
static void zynqmp_dpsub_crtc_atomic_enable(struct drm_crtc *crtc,
					    struct drm_atomic_state *state)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
	struct drm_display_mode *adjusted_mode = &crtc->state->adjusted_mode;
	int ret, vrefresh;

	pm_runtime_get_sync(dpsub->dev);

	zynqmp_disp_setup_clock(dpsub->disp, adjusted_mode->clock * 1000);

	ret = clk_prepare_enable(dpsub->vid_clk);
	if (ret) {
		dev_err(dpsub->dev, "failed to enable the pixel clock\n");
		pm_runtime_put_sync(dpsub->dev);
		return;
	}

	zynqmp_disp_enable(dpsub->disp);

	/*
	 * Wait for three vblank intervals to let the timing generator
	 * stabilize. For a 1920x1080p60 mode (148.5 MHz pixel clock, htotal
	 * 2200, vtotal 1125), for example, this works out to a roughly 50 ms
	 * sleep.
	 */
	vrefresh = (adjusted_mode->clock * 1000) /
		   (adjusted_mode->vtotal * adjusted_mode->htotal);
	msleep(3 * 1000 / vrefresh);
}

static void zynqmp_dpsub_crtc_atomic_disable(struct drm_crtc *crtc,
					     struct drm_atomic_state *state)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
	struct drm_plane_state *old_plane_state;

	/*
	 * Disable the plane if active. The old plane state can be NULL in the
	 * .shutdown() path if the plane is already disabled; skip
	 * zynqmp_dpsub_plane_atomic_disable() in that case.
	 */
	old_plane_state = drm_atomic_get_old_plane_state(state, crtc->primary);
	if (old_plane_state)
		zynqmp_dpsub_plane_atomic_disable(crtc->primary, state);

	zynqmp_disp_disable(dpsub->disp);

	drm_crtc_vblank_off(crtc);

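	/*
	 * Vblank interrupts are off at this point, so a pending event can no
	 * longer be delivered from the vblank handler; complete it here.
	 */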
	spin_lock_irq(&crtc->dev->event_lock);
	if (crtc->state->event) {
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
	}
	spin_unlock_irq(&crtc->dev->event_lock);

	clk_disable_unprepare(dpsub->vid_clk);
	pm_runtime_put_sync(dpsub->dev);
}

static int zynqmp_dpsub_crtc_atomic_check(struct drm_crtc *crtc,
					  struct drm_atomic_state *state)
{
	return drm_atomic_add_affected_planes(state, crtc);
}

static void zynqmp_dpsub_crtc_atomic_begin(struct drm_crtc *crtc,
					   struct drm_atomic_state *state)
{
	drm_crtc_vblank_on(crtc);
}

static void zynqmp_dpsub_crtc_atomic_flush(struct drm_crtc *crtc,
					   struct drm_atomic_state *state)
{
	if (crtc->state->event) {
		struct drm_pending_vblank_event *event;

		/* Consume the flip_done event from atomic helper. */
		event = crtc->state->event;
		crtc->state->event = NULL;

		event->pipe = drm_crtc_index(crtc);

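		/*
		 * Hold a vblank reference for the event;
		 * drm_crtc_arm_vblank_event() will send it out on the next
		 * vblank and drop the reference.
		 */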
		WARN_ON(drm_crtc_vblank_get(crtc) != 0);

		spin_lock_irq(&crtc->dev->event_lock);
		drm_crtc_arm_vblank_event(crtc, event);
		spin_unlock_irq(&crtc->dev->event_lock);
	}
}

static const struct drm_crtc_helper_funcs zynqmp_dpsub_crtc_helper_funcs = {
	.atomic_enable	= zynqmp_dpsub_crtc_atomic_enable,
	.atomic_disable	= zynqmp_dpsub_crtc_atomic_disable,
	.atomic_check	= zynqmp_dpsub_crtc_atomic_check,
	.atomic_begin	= zynqmp_dpsub_crtc_atomic_begin,
	.atomic_flush	= zynqmp_dpsub_crtc_atomic_flush,
};

static int zynqmp_dpsub_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);

	zynqmp_dp_enable_vblank(dpsub->dp);

	return 0;
}

static void zynqmp_dpsub_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);

	zynqmp_dp_disable_vblank(dpsub->dp);
}

static const struct drm_crtc_funcs zynqmp_dpsub_crtc_funcs = {
	.destroy		= drm_crtc_cleanup,
	.set_config		= drm_atomic_helper_set_config,
	.page_flip		= drm_atomic_helper_page_flip,
	.reset			= drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state	= drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_crtc_destroy_state,
	.enable_vblank		= zynqmp_dpsub_crtc_enable_vblank,
	.disable_vblank		= zynqmp_dpsub_crtc_disable_vblank,
};

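/*
 * The CRTC is built on top of the graphics layer, which acts as the primary
 * plane, matching the plane types assigned in zynqmp_dpsub_create_planes().
 */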
static int zynqmp_dpsub_create_crtc(struct zynqmp_dpsub *dpsub)
{
	struct drm_plane *plane = &dpsub->drm->planes[ZYNQMP_DPSUB_LAYER_GFX];
	struct drm_crtc *crtc = &dpsub->drm->crtc;
	int ret;

	ret = drm_crtc_init_with_planes(&dpsub->drm->dev, crtc, plane,
					NULL, &zynqmp_dpsub_crtc_funcs, NULL);
	if (ret < 0)
		return ret;

	drm_crtc_helper_add(crtc, &zynqmp_dpsub_crtc_helper_funcs);

	/* Start with vertical blanking interrupt reporting disabled. */
	drm_crtc_vblank_off(crtc);

	return 0;
}

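/* The subsystem has a single CRTC, so every plane can only bind to it. */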
static void zynqmp_dpsub_map_crtc_to_plane(struct zynqmp_dpsub *dpsub)
{
	u32 possible_crtcs = drm_crtc_mask(&dpsub->drm->crtc);
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(dpsub->drm->planes); i++)
		dpsub->drm->planes[i].possible_crtcs = possible_crtcs;
}

/**
 * zynqmp_dpsub_drm_handle_vblank - Handle the vblank event
 * @dpsub: DisplayPort subsystem
 *
 * This function handles the vblank interrupt and sends an event to the CRTC
 * object. It is called by the DP vblank interrupt handler.
 */
void zynqmp_dpsub_drm_handle_vblank(struct zynqmp_dpsub *dpsub)
{
	drm_crtc_handle_vblank(&dpsub->drm->crtc);
}

/* -----------------------------------------------------------------------------
 * Dumb Buffer & Framebuffer Allocation
 */

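/*
 * For example, a 1920-pixel-wide dumb buffer with a 32 bpp format needs a
 * minimum pitch of 1920 * 32 / 8 = 7680 bytes, which is then rounded up to
 * the DMA engine alignment.
 */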
static int zynqmp_dpsub_dumb_create(struct drm_file *file_priv,
				    struct drm_device *drm,
				    struct drm_mode_create_dumb *args)
{
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
	unsigned int pitch = DIV_ROUND_UP(args->width * args->bpp, 8);

	/* Enforce the alignment constraints of the DMA engine. */
	args->pitch = ALIGN(pitch, dpsub->dma_align);

	return drm_gem_dma_dumb_create_internal(file_priv, drm, args);
}

static struct drm_framebuffer *
zynqmp_dpsub_fb_create(struct drm_device *drm, struct drm_file *file_priv,
		       const struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
	struct drm_mode_fb_cmd2 cmd = *mode_cmd;
	unsigned int i;

	/* Enforce the alignment constraints of the DMA engine. */
	for (i = 0; i < ARRAY_SIZE(cmd.pitches); ++i)
		cmd.pitches[i] = ALIGN(cmd.pitches[i], dpsub->dma_align);

	return drm_gem_fb_create(drm, file_priv, &cmd);
}

static const struct drm_mode_config_funcs zynqmp_dpsub_mode_config_funcs = {
	.fb_create		= zynqmp_dpsub_fb_create,
	.atomic_check		= drm_atomic_helper_check,
	.atomic_commit		= drm_atomic_helper_commit,
};

/* -----------------------------------------------------------------------------
 * DRM/KMS Driver
 */

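/* Generate the file operations, backed by the GEM DMA helpers. */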
DEFINE_DRM_GEM_DMA_FOPS(zynqmp_dpsub_drm_fops);

static const struct drm_driver zynqmp_dpsub_drm_driver = {
	.driver_features		= DRIVER_MODESET | DRIVER_GEM |
					  DRIVER_ATOMIC,

	DRM_GEM_DMA_DRIVER_OPS_WITH_DUMB_CREATE(zynqmp_dpsub_dumb_create),
	DRM_FBDEV_DMA_DRIVER_OPS,

	.fops				= &zynqmp_dpsub_drm_fops,

	.name				= "zynqmp-dpsub",
	.desc				= "Xilinx DisplayPort Subsystem Driver",
	.date				= "20130509",
	.major				= 1,
	.minor				= 0,
};

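/*
 * Build the KMS pipeline: planes and CRTC first, then the encoder, the bridge
 * chain attached to it, and finally the connector that represents the chain.
 */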
static int zynqmp_dpsub_kms_init(struct zynqmp_dpsub *dpsub)
{
	struct drm_encoder *encoder = &dpsub->drm->encoder;
	struct drm_connector *connector;
	int ret;

	/* Create the planes and the CRTC. */
	ret = zynqmp_dpsub_create_planes(dpsub);
	if (ret)
		return ret;

	ret = zynqmp_dpsub_create_crtc(dpsub);
	if (ret < 0)
		return ret;

	zynqmp_dpsub_map_crtc_to_plane(dpsub);

	/* Create the encoder and attach the bridge. */
	encoder->possible_crtcs |= drm_crtc_mask(&dpsub->drm->crtc);
	ret = drm_simple_encoder_init(&dpsub->drm->dev, encoder,
				      DRM_MODE_ENCODER_NONE);
	if (ret < 0)
		return ret;

	ret = drm_bridge_attach(encoder, dpsub->bridge, NULL,
				DRM_BRIDGE_ATTACH_NO_CONNECTOR);
	if (ret) {
		dev_err(dpsub->dev, "failed to attach bridge to encoder\n");
		goto err_encoder;
	}

	/* Create the connector for the chain of bridges. */
	connector = drm_bridge_connector_init(&dpsub->drm->dev, encoder);
	if (IS_ERR(connector)) {
		dev_err(dpsub->dev, "failed to create connector\n");
		ret = PTR_ERR(connector);
		goto err_encoder;
	}

	ret = drm_connector_attach_encoder(connector, encoder);
	if (ret < 0) {
		dev_err(dpsub->dev, "failed to attach connector to encoder\n");
		goto err_encoder;
	}

	return 0;

err_encoder:
	drm_encoder_cleanup(encoder);
	return ret;
}

static void zynqmp_dpsub_drm_release(struct drm_device *drm, void *res)
{
	struct zynqmp_dpsub_drm *dpdrm = res;

	zynqmp_dpsub_release(dpdrm->dpsub);
}

int zynqmp_dpsub_drm_init(struct zynqmp_dpsub *dpsub)
{
	struct zynqmp_dpsub_drm *dpdrm;
	struct drm_device *drm;
	int ret;

	/*
	 * Allocate the drm_device and immediately add a cleanup action to
	 * release the zynqmp_dpsub instance. If any of those operations fail,
	 * dpsub->drm will remain NULL, which tells the caller that it must
	 * clean up manually.
	 */
	dpdrm = devm_drm_dev_alloc(dpsub->dev, &zynqmp_dpsub_drm_driver,
				   struct zynqmp_dpsub_drm, dev);
	if (IS_ERR(dpdrm))
		return PTR_ERR(dpdrm);

	dpdrm->dpsub = dpsub;
	drm = &dpdrm->dev;

	ret = drmm_add_action(drm, zynqmp_dpsub_drm_release, dpdrm);
	if (ret < 0)
		return ret;

	dpsub->drm = dpdrm;

	/* Initialize mode config, vblank and the KMS poll helper. */
	ret = drmm_mode_config_init(drm);
	if (ret < 0)
		return ret;

	drm->mode_config.funcs = &zynqmp_dpsub_mode_config_funcs;
	drm->mode_config.min_width = 0;
	drm->mode_config.min_height = 0;
	drm->mode_config.max_width = ZYNQMP_DISP_MAX_WIDTH;
	drm->mode_config.max_height = ZYNQMP_DISP_MAX_HEIGHT;

	ret = drm_vblank_init(drm, 1);
	if (ret)
		return ret;

	ret = zynqmp_dpsub_kms_init(dpsub);
	if (ret < 0)
		goto err_poll_fini;

	drm_kms_helper_poll_init(drm);

	/* Reset all components and register the DRM device. */
	drm_mode_config_reset(drm);

	ret = drm_dev_register(drm, 0);
	if (ret < 0)
		goto err_poll_fini;

	/* Initialize the generic fbdev emulation. */
	drm_client_setup_with_fourcc(drm, DRM_FORMAT_RGB888);

	return 0;

err_poll_fini:
	drm_kms_helper_poll_fini(drm);
	return ret;
}

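/*
 * Unplug the DRM device first to cut off userspace access before the
 * pipeline is shut down and the encoder and poll helper are torn down.
 */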
void zynqmp_dpsub_drm_cleanup(struct zynqmp_dpsub *dpsub)
{
	struct drm_device *drm = &dpsub->drm->dev;

	drm_dev_unplug(drm);
	drm_atomic_helper_shutdown(drm);
	drm_encoder_cleanup(&dpsub->drm->encoder);
	drm_kms_helper_poll_fini(drm);
}
546