// SPDX-License-Identifier: GPL-2.0
/*
 * ZynqMP DisplayPort Subsystem - KMS API
 *
 * Copyright (C) 2017 - 2021 Xilinx, Inc.
 *
 * Authors:
 * - Hyun Woo Kwon <hyun.kwon@xilinx.com>
 * - Laurent Pinchart <laurent.pinchart@ideasonboard.com>
 */

#include <drm/clients/drm_client_setup.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_blend.h>
#include <drm/drm_bridge.h>
#include <drm/drm_bridge_connector.h>
#include <drm/drm_connector.h>
#include <drm/drm_crtc.h>
#include <drm/drm_device.h>
#include <drm/drm_drv.h>
#include <drm/drm_dumb_buffers.h>
#include <drm/drm_encoder.h>
#include <drm/drm_fbdev_dma.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_dma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_managed.h>
#include <drm/drm_mode_config.h>
#include <drm/drm_plane.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_simple_kms_helper.h>
#include <drm/drm_vblank.h>

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/pm_runtime.h>
#include <linux/spinlock.h>

#include "zynqmp_disp.h"
#include "zynqmp_dp.h"
#include "zynqmp_dpsub.h"
#include "zynqmp_kms.h"

static inline struct zynqmp_dpsub *to_zynqmp_dpsub(struct drm_device *drm)
{
	return container_of(drm, struct zynqmp_dpsub_drm, dev)->dpsub;
}

/* -----------------------------------------------------------------------------
 * DRM Planes
 */

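/*
 * Validate the new plane state against its CRTC state. The DPSUB layers
 * cannot scale, so any update that would require scaling is rejected.
 */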
static int zynqmp_dpsub_plane_atomic_check(struct drm_plane *plane,
					   struct drm_atomic_state *state)
{
	struct drm_plane_state *new_plane_state = drm_atomic_get_new_plane_state(state,
										 plane);
	struct drm_crtc_state *crtc_state;

	if (!new_plane_state->crtc)
		return 0;

	crtc_state = drm_atomic_get_crtc_state(state, new_plane_state->crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	return drm_atomic_helper_check_plane_state(new_plane_state,
						   crtc_state,
						   DRM_PLANE_NO_SCALING,
						   DRM_PLANE_NO_SCALING,
						   false, false);
}

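/*
 * Disable the layer backing the plane. For the graphics layer, also disable
 * global alpha blending in the blender.
 */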
static void zynqmp_dpsub_plane_atomic_disable(struct drm_plane *plane,
					      struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state,
									   plane);
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(plane->dev);
	struct zynqmp_disp_layer *layer = dpsub->layers[plane->index];

	if (!old_state->fb)
		return;

	zynqmp_disp_layer_disable(layer);

	if (plane->index == ZYNQMP_DPSUB_LAYER_GFX)
		zynqmp_disp_blend_set_global_alpha(dpsub->disp, false,
						   plane->state->alpha >> 8);
}

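/*
 * Reprogram the layer for the new plane state: reconfigure the format when
 * it changed (disabling the layer first if it was enabled), update the
 * layer with the new framebuffer, and re-enable the layer.
 */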
static void zynqmp_dpsub_plane_atomic_update(struct drm_plane *plane,
					     struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state, plane);
	struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state, plane);
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(plane->dev);
	struct zynqmp_disp_layer *layer = dpsub->layers[plane->index];
	bool format_changed = false;

	if (!old_state->fb ||
	    old_state->fb->format->format != new_state->fb->format->format)
		format_changed = true;

	/*
	 * If the format has changed (including going from a previously
	 * disabled state to any format), reconfigure the format. Disable the
	 * plane first if needed.
	 */
	if (format_changed) {
		if (old_state->fb)
			zynqmp_disp_layer_disable(layer);

		zynqmp_disp_layer_set_format(layer, new_state->fb->format);
	}

	zynqmp_disp_layer_update(layer, new_state);

	if (plane->index == ZYNQMP_DPSUB_LAYER_GFX)
		zynqmp_disp_blend_set_global_alpha(dpsub->disp, true,
						   plane->state->alpha >> 8);

	/*
	 * Unconditionally enable the layer, as it may have been disabled
	 * previously either explicitly to reconfigure the layer format, or
	 * implicitly after DPSUB reset during a display mode change. The DRM
	 * framework calls this callback for enabled planes only.
	 */
	zynqmp_disp_layer_enable(layer);
}

static const struct drm_plane_helper_funcs zynqmp_dpsub_plane_helper_funcs = {
	.atomic_check		= zynqmp_dpsub_plane_atomic_check,
	.atomic_update		= zynqmp_dpsub_plane_atomic_update,
	.atomic_disable		= zynqmp_dpsub_plane_atomic_disable,
};

static const struct drm_plane_funcs zynqmp_dpsub_plane_funcs = {
	.update_plane		= drm_atomic_helper_update_plane,
	.disable_plane		= drm_atomic_helper_disable_plane,
	.destroy		= drm_plane_cleanup,
	.reset			= drm_atomic_helper_plane_reset,
	.atomic_duplicate_state	= drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_plane_destroy_state,
};

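/*
 * Create one DRM plane per display layer. Each plane gets an immutable zpos
 * property matching its layer index, and the graphics plane additionally
 * gets an alpha property that drives global alpha blending.
 */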
static int zynqmp_dpsub_create_planes(struct zynqmp_dpsub *dpsub)
{
	unsigned int i;
	int ret;

	for (i = 0; i < ARRAY_SIZE(dpsub->drm->planes); i++) {
		struct zynqmp_disp_layer *layer = dpsub->layers[i];
		struct drm_plane *plane = &dpsub->drm->planes[i];
		enum drm_plane_type type;
		unsigned int num_formats;
		u32 *formats;

		formats = zynqmp_disp_layer_drm_formats(layer, &num_formats);
		if (!formats)
			return -ENOMEM;

		/* Graphics layer is primary, and video layer is overlay. */
		type = i == ZYNQMP_DPSUB_LAYER_VID
		     ? DRM_PLANE_TYPE_OVERLAY : DRM_PLANE_TYPE_PRIMARY;
		ret = drm_universal_plane_init(&dpsub->drm->dev, plane, 0,
					       &zynqmp_dpsub_plane_funcs,
					       formats, num_formats,
					       NULL, type, NULL);
		kfree(formats);
		if (ret)
			return ret;

		drm_plane_helper_add(plane, &zynqmp_dpsub_plane_helper_funcs);

		drm_plane_create_zpos_immutable_property(plane, i);
		if (i == ZYNQMP_DPSUB_LAYER_GFX)
			drm_plane_create_alpha_property(plane);
	}

	return 0;
}

/* -----------------------------------------------------------------------------
 * DRM CRTC
 */

static inline struct zynqmp_dpsub *crtc_to_dpsub(struct drm_crtc *crtc)
{
	return container_of(crtc, struct zynqmp_dpsub_drm, crtc)->dpsub;
}

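/*
 * Enable the CRTC: power up the subsystem, configure and enable the pixel
 * clock, enable the display, and wait for the timing generator to stabilize
 * before the first frame is scanned out.
 */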
static void zynqmp_dpsub_crtc_atomic_enable(struct drm_crtc *crtc,
					    struct drm_atomic_state *state)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
	struct drm_display_mode *adjusted_mode = &crtc->state->adjusted_mode;
	int ret, vrefresh;

	pm_runtime_get_sync(dpsub->dev);

	zynqmp_disp_setup_clock(dpsub->disp, adjusted_mode->clock * 1000);

	ret = clk_prepare_enable(dpsub->vid_clk);
	if (ret) {
		dev_err(dpsub->dev, "failed to enable the pixel clock\n");
		pm_runtime_put_sync(dpsub->dev);
		return;
	}

	zynqmp_disp_enable(dpsub->disp);

	/* Delay of 3 vblank intervals for the timing generator to be stable */
	vrefresh = (adjusted_mode->clock * 1000) /
		   (adjusted_mode->vtotal * adjusted_mode->htotal);
	msleep(3 * 1000 / vrefresh);
}

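/*
 * Disable the CRTC, undoing .atomic_enable: disable the primary plane if it
 * is still active, disable the display and vblank reporting, complete any
 * pending vblank event, then disable the pixel clock and drop the runtime
 * PM reference.
 */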
static void zynqmp_dpsub_crtc_atomic_disable(struct drm_crtc *crtc,
					     struct drm_atomic_state *state)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
	struct drm_plane_state *old_plane_state;

	/*
	 * Disable the plane if active. The old plane state can be NULL in the
	 * .shutdown() path if the plane is already disabled; skip
	 * zynqmp_dpsub_plane_atomic_disable() in that case.
	 */
	old_plane_state = drm_atomic_get_old_plane_state(state, crtc->primary);
	if (old_plane_state)
		zynqmp_dpsub_plane_atomic_disable(crtc->primary, state);

	zynqmp_disp_disable(dpsub->disp);

	drm_crtc_vblank_off(crtc);

	spin_lock_irq(&crtc->dev->event_lock);
	if (crtc->state->event) {
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
	}
	spin_unlock_irq(&crtc->dev->event_lock);

	clk_disable_unprepare(dpsub->vid_clk);
	pm_runtime_put_sync(dpsub->dev);
}

static int zynqmp_dpsub_crtc_atomic_check(struct drm_crtc *crtc,
					  struct drm_atomic_state *state)
{
	return drm_atomic_add_affected_planes(state, crtc);
}

static void zynqmp_dpsub_crtc_atomic_begin(struct drm_crtc *crtc,
					   struct drm_atomic_state *state)
{
	drm_crtc_vblank_on(crtc);
}

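/*
 * Arm the pending page flip event, if any, so that it gets sent on the next
 * vblank. The vblank reference taken here is released when the event is
 * sent.
 */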
static void zynqmp_dpsub_crtc_atomic_flush(struct drm_crtc *crtc,
					   struct drm_atomic_state *state)
{
	if (crtc->state->event) {
		struct drm_pending_vblank_event *event;

		/* Consume the flip_done event from atomic helper. */
		event = crtc->state->event;
		crtc->state->event = NULL;

		event->pipe = drm_crtc_index(crtc);

		WARN_ON(drm_crtc_vblank_get(crtc) != 0);

		spin_lock_irq(&crtc->dev->event_lock);
		drm_crtc_arm_vblank_event(crtc, event);
		spin_unlock_irq(&crtc->dev->event_lock);
	}
}

static const struct drm_crtc_helper_funcs zynqmp_dpsub_crtc_helper_funcs = {
	.atomic_enable	= zynqmp_dpsub_crtc_atomic_enable,
	.atomic_disable	= zynqmp_dpsub_crtc_atomic_disable,
	.atomic_check	= zynqmp_dpsub_crtc_atomic_check,
	.atomic_begin	= zynqmp_dpsub_crtc_atomic_begin,
	.atomic_flush	= zynqmp_dpsub_crtc_atomic_flush,
};

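/*
 * Vblank interrupts are generated by the DisplayPort controller, so enabling
 * and disabling them is delegated to the DP layer.
 */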
static int zynqmp_dpsub_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);

	zynqmp_dp_enable_vblank(dpsub->dp);

	return 0;
}

static void zynqmp_dpsub_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);

	zynqmp_dp_disable_vblank(dpsub->dp);
}

static const struct drm_crtc_funcs zynqmp_dpsub_crtc_funcs = {
	.destroy		= drm_crtc_cleanup,
	.set_config		= drm_atomic_helper_set_config,
	.page_flip		= drm_atomic_helper_page_flip,
	.reset			= drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state	= drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_crtc_destroy_state,
	.enable_vblank		= zynqmp_dpsub_crtc_enable_vblank,
	.disable_vblank		= zynqmp_dpsub_crtc_disable_vblank,
};

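/*
 * Create the single CRTC of the subsystem, with the graphics plane as its
 * primary plane and vblank reporting initially disabled.
 */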
static int zynqmp_dpsub_create_crtc(struct zynqmp_dpsub *dpsub)
{
	struct drm_plane *plane = &dpsub->drm->planes[ZYNQMP_DPSUB_LAYER_GFX];
	struct drm_crtc *crtc = &dpsub->drm->crtc;
	int ret;

	ret = drm_crtc_init_with_planes(&dpsub->drm->dev, crtc, plane,
					NULL, &zynqmp_dpsub_crtc_funcs, NULL);
	if (ret < 0)
		return ret;

	drm_crtc_helper_add(crtc, &zynqmp_dpsub_crtc_helper_funcs);

	/* Start with vertical blanking interrupt reporting disabled. */
	drm_crtc_vblank_off(crtc);

	return 0;
}

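/* Both planes are bound to the single CRTC of the subsystem. */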
static void zynqmp_dpsub_map_crtc_to_plane(struct zynqmp_dpsub *dpsub)
{
	u32 possible_crtcs = drm_crtc_mask(&dpsub->drm->crtc);
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(dpsub->drm->planes); i++)
		dpsub->drm->planes[i].possible_crtcs = possible_crtcs;
}

/**
 * zynqmp_dpsub_drm_handle_vblank - Handle the vblank event
 * @dpsub: DisplayPort subsystem
 *
 * This function handles the vblank interrupt and sends an event to the
 * CRTC object. It is called by the DP vblank interrupt handler.
 */
void zynqmp_dpsub_drm_handle_vblank(struct zynqmp_dpsub *dpsub)
{
	drm_crtc_handle_vblank(&dpsub->drm->crtc);
}

/* -----------------------------------------------------------------------------
 * Dumb Buffer & Framebuffer Allocation
 */

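/*
 * Size the dumb buffer with a pitch aligned to the constraints of the DMA
 * engine before delegating the allocation to the GEM DMA helper.
 */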
static int zynqmp_dpsub_dumb_create(struct drm_file *file_priv,
				    struct drm_device *drm,
				    struct drm_mode_create_dumb *args)
{
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
	int ret;

	/* Enforce the alignment constraints of the DMA engine. */
	ret = drm_mode_size_dumb(drm, args, dpsub->dma_align, 0);
	if (ret)
		return ret;

	return drm_gem_dma_dumb_create_internal(file_priv, drm, args);
}

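/*
 * Wrap drm_gem_fb_create() to round the user-supplied pitches up to the
 * alignment required by the DMA engine.
 */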
static struct drm_framebuffer *
zynqmp_dpsub_fb_create(struct drm_device *drm, struct drm_file *file_priv,
		       const struct drm_format_info *info,
		       const struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
	struct drm_mode_fb_cmd2 cmd = *mode_cmd;
	unsigned int i;

	/* Enforce the alignment constraints of the DMA engine. */
	for (i = 0; i < ARRAY_SIZE(cmd.pitches); ++i)
		cmd.pitches[i] = ALIGN(cmd.pitches[i], dpsub->dma_align);

	return drm_gem_fb_create(drm, file_priv, info, &cmd);
}

static const struct drm_mode_config_funcs zynqmp_dpsub_mode_config_funcs = {
	.fb_create		= zynqmp_dpsub_fb_create,
	.atomic_check		= drm_atomic_helper_check,
	.atomic_commit		= drm_atomic_helper_commit,
};

/* -----------------------------------------------------------------------------
 * DRM/KMS Driver
 */

DEFINE_DRM_GEM_DMA_FOPS(zynqmp_dpsub_drm_fops);

static const struct drm_driver zynqmp_dpsub_drm_driver = {
	.driver_features		= DRIVER_MODESET | DRIVER_GEM |
					  DRIVER_ATOMIC,

	DRM_GEM_DMA_DRIVER_OPS_WITH_DUMB_CREATE(zynqmp_dpsub_dumb_create),
	DRM_FBDEV_DMA_DRIVER_OPS,

	.fops				= &zynqmp_dpsub_drm_fops,

	.name				= "zynqmp-dpsub",
	.desc				= "Xilinx DisplayPort Subsystem Driver",
	.major				= 1,
	.minor				= 0,
};

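/*
 * Create the KMS pipeline: planes, CRTC, encoder and connector. The DPSUB
 * bridge is attached to the encoder, and the connector is created by the
 * bridge connector helper to wrap the chain of bridges.
 */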
static int zynqmp_dpsub_kms_init(struct zynqmp_dpsub *dpsub)
{
	struct drm_encoder *encoder = &dpsub->drm->encoder;
	struct drm_connector *connector;
	int ret;

	/* Create the planes and the CRTC. */
	ret = zynqmp_dpsub_create_planes(dpsub);
	if (ret)
		return ret;

	ret = zynqmp_dpsub_create_crtc(dpsub);
	if (ret < 0)
		return ret;

	zynqmp_dpsub_map_crtc_to_plane(dpsub);

	/* Create the encoder and attach the bridge. */
	encoder->possible_crtcs |= drm_crtc_mask(&dpsub->drm->crtc);
	ret = drm_simple_encoder_init(&dpsub->drm->dev, encoder,
				      DRM_MODE_ENCODER_NONE);
	if (ret < 0)
		return ret;

	ret = drm_bridge_attach(encoder, dpsub->bridge, NULL,
				DRM_BRIDGE_ATTACH_NO_CONNECTOR);
	if (ret) {
		dev_err(dpsub->dev, "failed to attach bridge to encoder\n");
		goto err_encoder;
	}

	/* Create the connector for the chain of bridges. */
	connector = drm_bridge_connector_init(&dpsub->drm->dev, encoder);
	if (IS_ERR(connector)) {
		dev_err(dpsub->dev, "failed to create connector\n");
		ret = PTR_ERR(connector);
		goto err_encoder;
	}

	ret = drm_connector_attach_encoder(connector, encoder);
	if (ret < 0) {
		dev_err(dpsub->dev, "failed to attach connector to encoder\n");
		goto err_encoder;
	}

	return 0;

err_encoder:
	drm_encoder_cleanup(encoder);
	return ret;
}

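/* DRM-managed action that releases the zynqmp_dpsub instance. */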
static void zynqmp_dpsub_drm_release(struct drm_device *drm, void *res)
{
	struct zynqmp_dpsub_drm *dpdrm = res;

	zynqmp_dpsub_release(dpdrm->dpsub);
}

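/**
 * zynqmp_dpsub_drm_init - Initialize the DRM/KMS device
 * @dpsub: DisplayPort subsystem
 *
 * Allocate the DRM device, initialize mode setting, vblank handling and the
 * KMS pipeline, register the device and set up fbdev emulation.
 *
 * Return: 0 on success, or a negative error code on failure.
 */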
int zynqmp_dpsub_drm_init(struct zynqmp_dpsub *dpsub)
{
	struct zynqmp_dpsub_drm *dpdrm;
	struct drm_device *drm;
	int ret;

	/*
	 * Allocate the drm_device and immediately add a cleanup action to
	 * release the zynqmp_dpsub instance. If any of those operations fail,
	 * dpsub->drm will remain NULL, which tells the caller that it must
	 * clean up manually.
	 */
	dpdrm = devm_drm_dev_alloc(dpsub->dev, &zynqmp_dpsub_drm_driver,
				   struct zynqmp_dpsub_drm, dev);
	if (IS_ERR(dpdrm))
		return PTR_ERR(dpdrm);

	dpdrm->dpsub = dpsub;
	drm = &dpdrm->dev;

	ret = drmm_add_action(drm, zynqmp_dpsub_drm_release, dpdrm);
	if (ret < 0)
		return ret;

	dpsub->drm = dpdrm;

	/* Initialize mode config, vblank and the KMS poll helper. */
	ret = drmm_mode_config_init(drm);
	if (ret < 0)
		return ret;

	drm->mode_config.funcs = &zynqmp_dpsub_mode_config_funcs;
	drm->mode_config.min_width = 0;
	drm->mode_config.min_height = 0;
	drm->mode_config.max_width = ZYNQMP_DISP_MAX_WIDTH;
	drm->mode_config.max_height = ZYNQMP_DISP_MAX_HEIGHT;

	ret = drm_vblank_init(drm, 1);
	if (ret)
		return ret;

	ret = zynqmp_dpsub_kms_init(dpsub);
	if (ret < 0)
		goto err_poll_fini;

	drm_kms_helper_poll_init(drm);

	/* Reset all components and register the DRM device. */
	drm_mode_config_reset(drm);

	ret = drm_dev_register(drm, 0);
	if (ret < 0)
		goto err_poll_fini;

	/* Initialize fbdev generic emulation. */
	drm_client_setup_with_fourcc(drm, DRM_FORMAT_RGB888);

	return 0;

err_poll_fini:
	drm_kms_helper_poll_fini(drm);
	return ret;
}

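/**
 * zynqmp_dpsub_drm_cleanup - Clean up the DRM/KMS device
 * @dpsub: DisplayPort subsystem
 *
 * Unplug the DRM device and shut down the atomic state, releasing the KMS
 * resources that are not handled by DRM-managed actions.
 */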
void zynqmp_dpsub_drm_cleanup(struct zynqmp_dpsub *dpsub)
{
	struct drm_device *drm = &dpsub->drm->dev;

	drm_dev_unplug(drm);
	drm_atomic_helper_shutdown(drm);
	drm_encoder_cleanup(&dpsub->drm->encoder);
	drm_kms_helper_poll_fini(drm);
}
549