// SPDX-License-Identifier: GPL-2.0
/*
 * ZynqMP DisplayPort Subsystem - KMS API
 *
 * Copyright (C) 2017 - 2021 Xilinx, Inc.
 *
 * Authors:
 * - Hyun Woo Kwon <hyun.kwon@xilinx.com>
 * - Laurent Pinchart <laurent.pinchart@ideasonboard.com>
 */

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_blend.h>
#include <drm/drm_bridge.h>
#include <drm/drm_bridge_connector.h>
#include <drm/drm_connector.h>
#include <drm/drm_crtc.h>
#include <drm/drm_device.h>
#include <drm/drm_drv.h>
#include <drm/drm_encoder.h>
#include <drm/drm_fbdev_dma.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_dma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_managed.h>
#include <drm/drm_mode_config.h>
#include <drm/drm_plane.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_simple_kms_helper.h>
#include <drm/drm_vblank.h>

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/pm_runtime.h>
#include <linux/spinlock.h>

#include "zynqmp_disp.h"
#include "zynqmp_dp.h"
#include "zynqmp_dpsub.h"
#include "zynqmp_kms.h"

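/*
 * The drm_device is embedded in struct zynqmp_dpsub_drm; map it back to the
 * zynqmp_dpsub instance that owns it.
 */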
static inline struct zynqmp_dpsub *to_zynqmp_dpsub(struct drm_device *drm)
{
	return container_of(drm, struct zynqmp_dpsub_drm, dev)->dpsub;
}

/* -----------------------------------------------------------------------------
 * DRM Planes
 */

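/*
 * Each DPSUB hardware layer (video and graphics) is exposed as a DRM plane.
 * The plane index matches the layer index in dpsub->layers: the graphics
 * layer is the primary plane and the video layer is an overlay.
 */
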
static int zynqmp_dpsub_plane_atomic_check(struct drm_plane *plane,
					   struct drm_atomic_state *state)
{
	struct drm_plane_state *new_plane_state = drm_atomic_get_new_plane_state(state,
										  plane);
	struct drm_crtc_state *crtc_state;

	if (!new_plane_state->crtc)
		return 0;

	crtc_state = drm_atomic_get_crtc_state(state, new_plane_state->crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	return drm_atomic_helper_check_plane_state(new_plane_state,
						   crtc_state,
						   DRM_PLANE_NO_SCALING,
						   DRM_PLANE_NO_SCALING,
						   false, false);
}

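/*
 * Disable the hardware layer backing the plane. For the graphics layer, also
 * drop global alpha blending back to the disabled state.
 */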
static void zynqmp_dpsub_plane_atomic_disable(struct drm_plane *plane,
					      struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state,
									   plane);
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(plane->dev);
	struct zynqmp_disp_layer *layer = dpsub->layers[plane->index];

	if (!old_state->fb)
		return;

	zynqmp_disp_layer_disable(layer);

	if (plane->index == ZYNQMP_DPSUB_LAYER_GFX)
		zynqmp_disp_blend_set_global_alpha(dpsub->disp, false,
						   plane->state->alpha >> 8);
}

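/*
 * Reprogram the layer for the new plane state: switch the pixel format if it
 * changed, push the new framebuffer to the layer, refresh global alpha for
 * the graphics layer, and re-enable the layer.
 */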
static void zynqmp_dpsub_plane_atomic_update(struct drm_plane *plane,
					     struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state, plane);
	struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state, plane);
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(plane->dev);
	struct zynqmp_disp_layer *layer = dpsub->layers[plane->index];
	bool format_changed = false;

	if (!old_state->fb ||
	    old_state->fb->format->format != new_state->fb->format->format)
		format_changed = true;

	/*
	 * If the format has changed (including going from a previously
	 * disabled state to any format), reconfigure the format. Disable the
	 * plane first if needed.
	 */
	if (format_changed) {
		if (old_state->fb)
			zynqmp_disp_layer_disable(layer);

		zynqmp_disp_layer_set_format(layer, new_state->fb->format);
	}

	zynqmp_disp_layer_update(layer, new_state);

	if (plane->index == ZYNQMP_DPSUB_LAYER_GFX)
		zynqmp_disp_blend_set_global_alpha(dpsub->disp, true,
						   plane->state->alpha >> 8);

	/*
	 * Unconditionally enable the layer, as it may have been disabled
	 * previously, either explicitly to reconfigure the layer format, or
	 * implicitly after a DPSUB reset during a display mode change. The
	 * DRM framework calls this callback for enabled planes only.
	 */
	zynqmp_disp_layer_enable(layer);
}

static const struct drm_plane_helper_funcs zynqmp_dpsub_plane_helper_funcs = {
	.atomic_check = zynqmp_dpsub_plane_atomic_check,
	.atomic_update = zynqmp_dpsub_plane_atomic_update,
	.atomic_disable = zynqmp_dpsub_plane_atomic_disable,
};

static const struct drm_plane_funcs zynqmp_dpsub_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = drm_plane_cleanup,
	.reset = drm_atomic_helper_plane_reset,
	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
};

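/*
 * Register one DRM plane per hardware layer. The format list is queried from
 * the display controller and freed right after initialization, as
 * drm_universal_plane_init() keeps its own copy of the formats.
 */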
static int zynqmp_dpsub_create_planes(struct zynqmp_dpsub *dpsub)
{
	unsigned int i;
	int ret;

	for (i = 0; i < ARRAY_SIZE(dpsub->drm->planes); i++) {
		struct zynqmp_disp_layer *layer = dpsub->layers[i];
		struct drm_plane *plane = &dpsub->drm->planes[i];
		enum drm_plane_type type;
		unsigned int num_formats;
		u32 *formats;

		formats = zynqmp_disp_layer_drm_formats(layer, &num_formats);
		if (!formats)
			return -ENOMEM;

		/* Graphics layer is primary, and video layer is overlay. */
		type = i == ZYNQMP_DPSUB_LAYER_VID
		     ? DRM_PLANE_TYPE_OVERLAY : DRM_PLANE_TYPE_PRIMARY;
		ret = drm_universal_plane_init(&dpsub->drm->dev, plane, 0,
					       &zynqmp_dpsub_plane_funcs,
					       formats, num_formats,
					       NULL, type, NULL);
		kfree(formats);
		if (ret)
			return ret;

		drm_plane_helper_add(plane, &zynqmp_dpsub_plane_helper_funcs);

		drm_plane_create_zpos_immutable_property(plane, i);
		if (i == ZYNQMP_DPSUB_LAYER_GFX)
			drm_plane_create_alpha_property(plane);
	}

	return 0;
}

/* -----------------------------------------------------------------------------
 * DRM CRTC
 */

static inline struct zynqmp_dpsub *crtc_to_dpsub(struct drm_crtc *crtc)
{
	return container_of(crtc, struct zynqmp_dpsub_drm, crtc)->dpsub;
}

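/*
 * Power up the subsystem, configure and enable the pixel clock for the
 * requested mode, and start the display controller. The runtime PM reference
 * taken here is released in zynqmp_dpsub_crtc_atomic_disable().
 */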
static void zynqmp_dpsub_crtc_atomic_enable(struct drm_crtc *crtc,
					    struct drm_atomic_state *state)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
	struct drm_display_mode *adjusted_mode = &crtc->state->adjusted_mode;
	int ret, vrefresh;

	pm_runtime_get_sync(dpsub->dev);

	zynqmp_disp_setup_clock(dpsub->disp, adjusted_mode->clock * 1000);

	ret = clk_prepare_enable(dpsub->vid_clk);
	if (ret) {
		dev_err(dpsub->dev, "failed to enable the pixel clock\n");
		pm_runtime_put_sync(dpsub->dev);
		return;
	}

	zynqmp_disp_enable(dpsub->disp);

	/* Delay of 3 vblank intervals for the timing generator to stabilize. */
	vrefresh = (adjusted_mode->clock * 1000) /
		   (adjusted_mode->vtotal * adjusted_mode->htotal);
	msleep(3 * 1000 / vrefresh);
}

static void zynqmp_dpsub_crtc_atomic_disable(struct drm_crtc *crtc,
					     struct drm_atomic_state *state)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
	struct drm_plane_state *old_plane_state;

	/*
	 * Disable the plane if active. The old plane state can be NULL in the
	 * .shutdown() path if the plane is already disabled; skip
	 * zynqmp_dpsub_plane_atomic_disable() in that case.
	 */
	old_plane_state = drm_atomic_get_old_plane_state(state, crtc->primary);
	if (old_plane_state)
		zynqmp_dpsub_plane_atomic_disable(crtc->primary, state);

	zynqmp_disp_disable(dpsub->disp);

	drm_crtc_vblank_off(crtc);

	spin_lock_irq(&crtc->dev->event_lock);
	if (crtc->state->event) {
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
	}
	spin_unlock_irq(&crtc->dev->event_lock);

	clk_disable_unprepare(dpsub->vid_clk);
	pm_runtime_put_sync(dpsub->dev);
}

static int zynqmp_dpsub_crtc_atomic_check(struct drm_crtc *crtc,
					  struct drm_atomic_state *state)
{
	return drm_atomic_add_affected_planes(state, crtc);
}

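/*
 * Turn vblank reporting on at the start of the commit, so that the page flip
 * event queued in .atomic_flush() can be armed against the vblank counter.
 */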
static void zynqmp_dpsub_crtc_atomic_begin(struct drm_crtc *crtc,
					   struct drm_atomic_state *state)
{
	drm_crtc_vblank_on(crtc);
}

static void zynqmp_dpsub_crtc_atomic_flush(struct drm_crtc *crtc,
					   struct drm_atomic_state *state)
{
	if (crtc->state->event) {
		struct drm_pending_vblank_event *event;

		/* Consume the flip_done event from the atomic helper. */
		event = crtc->state->event;
		crtc->state->event = NULL;

		event->pipe = drm_crtc_index(crtc);

		WARN_ON(drm_crtc_vblank_get(crtc) != 0);

		spin_lock_irq(&crtc->dev->event_lock);
		drm_crtc_arm_vblank_event(crtc, event);
		spin_unlock_irq(&crtc->dev->event_lock);
	}
}

static const struct drm_crtc_helper_funcs zynqmp_dpsub_crtc_helper_funcs = {
	.atomic_enable = zynqmp_dpsub_crtc_atomic_enable,
	.atomic_disable = zynqmp_dpsub_crtc_atomic_disable,
	.atomic_check = zynqmp_dpsub_crtc_atomic_check,
	.atomic_begin = zynqmp_dpsub_crtc_atomic_begin,
	.atomic_flush = zynqmp_dpsub_crtc_atomic_flush,
};

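/*
 * Vblank interrupts are generated by the DisplayPort encoder; the CRTC
 * simply forwards the enable and disable requests to the DP core.
 */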
static int zynqmp_dpsub_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);

	zynqmp_dp_enable_vblank(dpsub->dp);

	return 0;
}

static void zynqmp_dpsub_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);

	zynqmp_dp_disable_vblank(dpsub->dp);
}

static const struct drm_crtc_funcs zynqmp_dpsub_crtc_funcs = {
	.destroy = drm_crtc_cleanup,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.reset = drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank = zynqmp_dpsub_crtc_enable_vblank,
	.disable_vblank = zynqmp_dpsub_crtc_disable_vblank,
};

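/* Create the single CRTC, using the graphics plane as its primary plane. */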
static int zynqmp_dpsub_create_crtc(struct zynqmp_dpsub *dpsub)
{
	struct drm_plane *plane = &dpsub->drm->planes[ZYNQMP_DPSUB_LAYER_GFX];
	struct drm_crtc *crtc = &dpsub->drm->crtc;
	int ret;

	ret = drm_crtc_init_with_planes(&dpsub->drm->dev, crtc, plane,
					NULL, &zynqmp_dpsub_crtc_funcs, NULL);
	if (ret < 0)
		return ret;

	drm_crtc_helper_add(crtc, &zynqmp_dpsub_crtc_helper_funcs);

	/* Start with vertical blanking interrupt reporting disabled. */
	drm_crtc_vblank_off(crtc);

	return 0;
}

static void zynqmp_dpsub_map_crtc_to_plane(struct zynqmp_dpsub *dpsub)
{
	u32 possible_crtcs = drm_crtc_mask(&dpsub->drm->crtc);
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(dpsub->drm->planes); i++)
		dpsub->drm->planes[i].possible_crtcs = possible_crtcs;
}

/**
 * zynqmp_dpsub_drm_handle_vblank - Handle the vblank event
 * @dpsub: DisplayPort subsystem
 *
 * This function handles the vblank interrupt and sends an event to the CRTC
 * object. It is called by the DP vblank interrupt handler.
 */
void zynqmp_dpsub_drm_handle_vblank(struct zynqmp_dpsub *dpsub)
{
	drm_crtc_handle_vblank(&dpsub->drm->crtc);
}

/* -----------------------------------------------------------------------------
 * Dumb Buffer & Framebuffer Allocation
 */

static int zynqmp_dpsub_dumb_create(struct drm_file *file_priv,
				    struct drm_device *drm,
				    struct drm_mode_create_dumb *args)
{
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
	unsigned int pitch = DIV_ROUND_UP(args->width * args->bpp, 8);

	/* Enforce the alignment constraints of the DMA engine. */
	args->pitch = ALIGN(pitch, dpsub->dma_align);

	return drm_gem_dma_dumb_create_internal(file_priv, drm, args);
}

static struct drm_framebuffer *
zynqmp_dpsub_fb_create(struct drm_device *drm, struct drm_file *file_priv,
		       const struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
	struct drm_mode_fb_cmd2 cmd = *mode_cmd;
	unsigned int i;

	/* Enforce the alignment constraints of the DMA engine. */
	for (i = 0; i < ARRAY_SIZE(cmd.pitches); ++i)
		cmd.pitches[i] = ALIGN(cmd.pitches[i], dpsub->dma_align);

	return drm_gem_fb_create(drm, file_priv, &cmd);
}

static const struct drm_mode_config_funcs zynqmp_dpsub_mode_config_funcs = {
	.fb_create = zynqmp_dpsub_fb_create,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = drm_atomic_helper_commit,
};

/* -----------------------------------------------------------------------------
 * DRM/KMS Driver
 */

DEFINE_DRM_GEM_DMA_FOPS(zynqmp_dpsub_drm_fops);

static const struct drm_driver zynqmp_dpsub_drm_driver = {
	.driver_features = DRIVER_MODESET | DRIVER_GEM |
			   DRIVER_ATOMIC,

	DRM_GEM_DMA_DRIVER_OPS_WITH_DUMB_CREATE(zynqmp_dpsub_dumb_create),

	.fops = &zynqmp_dpsub_drm_fops,

	.name = "zynqmp-dpsub",
	.desc = "Xilinx DisplayPort Subsystem Driver",
	.date = "20130509",
	.major = 1,
	.minor = 0,
};

static int zynqmp_dpsub_kms_init(struct zynqmp_dpsub *dpsub)
{
	struct drm_encoder *encoder = &dpsub->drm->encoder;
	struct drm_connector *connector;
	int ret;

	/* Create the planes and the CRTC. */
	ret = zynqmp_dpsub_create_planes(dpsub);
	if (ret)
		return ret;

	ret = zynqmp_dpsub_create_crtc(dpsub);
	if (ret < 0)
		return ret;

	zynqmp_dpsub_map_crtc_to_plane(dpsub);

	/* Create the encoder and attach the bridge. */
	encoder->possible_crtcs |= drm_crtc_mask(&dpsub->drm->crtc);
	drm_simple_encoder_init(&dpsub->drm->dev, encoder, DRM_MODE_ENCODER_NONE);

	ret = drm_bridge_attach(encoder, dpsub->bridge, NULL,
				DRM_BRIDGE_ATTACH_NO_CONNECTOR);
	if (ret) {
		dev_err(dpsub->dev, "failed to attach bridge to encoder\n");
		goto err_encoder;
	}

	/* Create the connector for the chain of bridges. */
	connector = drm_bridge_connector_init(&dpsub->drm->dev, encoder);
	if (IS_ERR(connector)) {
		dev_err(dpsub->dev, "failed to create connector\n");
		ret = PTR_ERR(connector);
		goto err_encoder;
	}

	ret = drm_connector_attach_encoder(connector, encoder);
	if (ret < 0) {
		dev_err(dpsub->dev, "failed to attach connector to encoder\n");
		goto err_encoder;
	}

	return 0;

err_encoder:
	drm_encoder_cleanup(encoder);
	return ret;
}

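/*
 * DRM-managed cleanup action: release the zynqmp_dpsub instance once the DRM
 * device is destroyed.
 */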
static void zynqmp_dpsub_drm_release(struct drm_device *drm, void *res)
{
	struct zynqmp_dpsub_drm *dpdrm = res;

	zynqmp_dpsub_release(dpdrm->dpsub);
}

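/**
 * zynqmp_dpsub_drm_init - Initialize the DRM/KMS side of the DPSUB
 * @dpsub: DisplayPort subsystem
 *
 * Allocate the DRM device, initialize mode setting and vblank handling,
 * create the planes, CRTC, encoder and connector, and register the DRM
 * device.
 *
 * Return: 0 on success, or a negative error code on failure.
 */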
int zynqmp_dpsub_drm_init(struct zynqmp_dpsub *dpsub)
{
	struct zynqmp_dpsub_drm *dpdrm;
	struct drm_device *drm;
	int ret;

	/*
	 * Allocate the drm_device and immediately add a cleanup action to
	 * release the zynqmp_dpsub instance. If any of those operations fail,
	 * dpsub->drm will remain NULL, which tells the caller that it must
	 * clean up manually.
	 */
	dpdrm = devm_drm_dev_alloc(dpsub->dev, &zynqmp_dpsub_drm_driver,
				   struct zynqmp_dpsub_drm, dev);
	if (IS_ERR(dpdrm))
		return PTR_ERR(dpdrm);

	dpdrm->dpsub = dpsub;
	drm = &dpdrm->dev;

	ret = drmm_add_action(drm, zynqmp_dpsub_drm_release, dpdrm);
	if (ret < 0)
		return ret;

	dpsub->drm = dpdrm;

	/* Initialize mode config, vblank and the KMS poll helper. */
	ret = drmm_mode_config_init(drm);
	if (ret < 0)
		return ret;

	drm->mode_config.funcs = &zynqmp_dpsub_mode_config_funcs;
	drm->mode_config.min_width = 0;
	drm->mode_config.min_height = 0;
	drm->mode_config.max_width = ZYNQMP_DISP_MAX_WIDTH;
	drm->mode_config.max_height = ZYNQMP_DISP_MAX_HEIGHT;

	ret = drm_vblank_init(drm, 1);
	if (ret)
		return ret;

	drm_kms_helper_poll_init(drm);

	ret = zynqmp_dpsub_kms_init(dpsub);
	if (ret < 0)
		goto err_poll_fini;

	/* Reset all components and register the DRM device. */
	drm_mode_config_reset(drm);

	ret = drm_dev_register(drm, 0);
	if (ret < 0)
		goto err_poll_fini;

	/* Initialize fbdev generic emulation. */
	drm_fbdev_dma_setup(drm, 24);

	return 0;

err_poll_fini:
	drm_kms_helper_poll_fini(drm);
	return ret;
}

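/**
 * zynqmp_dpsub_drm_cleanup - Tear down the DRM/KMS side of the DPSUB
 * @dpsub: DisplayPort subsystem
 *
 * Unregister the DRM device and shut down the atomic state. The device
 * itself is freed by the devm/drmm cleanup actions registered at init time.
 */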
void zynqmp_dpsub_drm_cleanup(struct zynqmp_dpsub *dpsub)
{
	struct drm_device *drm = &dpsub->drm->dev;

	drm_dev_unregister(drm);
	drm_atomic_helper_shutdown(drm);
	drm_encoder_cleanup(&dpsub->drm->encoder);
	drm_kms_helper_poll_fini(drm);
}