// SPDX-License-Identifier: GPL-2.0-only

#include <linux/component.h>
#include <linux/iopoll.h>
#include <linux/of.h>
#include <linux/platform_device.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_bridge.h>
#include <drm/drm_bridge_connector.h>
#include <drm/drm_drv.h>
#include <drm/drm_fb_dma_helper.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_atomic_helper.h>
#include <drm/drm_gem_dma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_of.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_vblank.h>

#define ADP_INT_STATUS 0x34
#define ADP_INT_STATUS_INT_MASK 0x7
#define ADP_INT_STATUS_VBLANK 0x1
#define ADP_CTRL 0x100
#define ADP_CTRL_VBLANK_ON 0x12
#define ADP_CTRL_FIFO_ON 0x601
#define ADP_SCREEN_SIZE 0x0c
#define ADP_SCREEN_HSIZE GENMASK(15, 0)
#define ADP_SCREEN_VSIZE GENMASK(31, 16)

#define ADBE_FIFO 0x10c0
#define ADBE_FIFO_SYNC 0xc0000000

#define ADBE_BLEND_BYPASS 0x2020
#define ADBE_BLEND_EN1 0x2028
#define ADBE_BLEND_EN2 0x2074
#define ADBE_BLEND_EN3 0x202c
#define ADBE_BLEND_EN4 0x2034
#define ADBE_MASK_BUF 0x2200

#define ADBE_SRC_START 0x4040
#define ADBE_SRC_SIZE 0x4048
#define ADBE_DST_START 0x4050
#define ADBE_DST_SIZE 0x4054
#define ADBE_STRIDE 0x4038
#define ADBE_FB_BASE 0x4030

#define ADBE_LAYER_EN1 0x4020
#define ADBE_LAYER_EN2 0x4068
#define ADBE_LAYER_EN3 0x40b4
#define ADBE_LAYER_EN4 0x40f4
#define ADBE_SCALE_CTL 0x40ac
#define ADBE_SCALE_CTL_BYPASS 0x100000

#define ADBE_LAYER_CTL 0x1038
#define ADBE_LAYER_CTL_ENABLE 0x10000

#define ADBE_PIX_FMT 0x402c
#define ADBE_PIX_FMT_XRGB32 0x53e4001

static int adp_open(struct inode *inode, struct file *filp)
{
	/*
	 * The modesetting driver does not check the non-desktop connector
	 * property and keeps the device open and locked. If the touchbar daemon
	 * opens the device first, modesetting breaks the whole X session.
	 * Simply refuse to open the device for X11 server processes as a
	 * workaround.
	 */
	if (current->comm[0] == 'X')
		return -EBUSY;

	return drm_open(inode, filp);
}

static const struct file_operations adp_fops = {
	.owner = THIS_MODULE,
	.open = adp_open,
	.release = drm_release,
	.unlocked_ioctl = drm_ioctl,
	.compat_ioctl = drm_compat_ioctl,
	.poll = drm_poll,
	.read = drm_read,
	.llseek = noop_llseek,
	.mmap = drm_gem_mmap,
	.fop_flags = FOP_UNSIGNED_OFFSET,
	DRM_GEM_DMA_UNMAPPED_AREA_FOPS
};

static int adp_drm_gem_dumb_create(struct drm_file *file_priv,
				   struct drm_device *drm,
				   struct drm_mode_create_dumb *args)
{
	args->height = ALIGN(args->height, 64);
	args->size = args->pitch * args->height;

	return drm_gem_dma_dumb_create_internal(file_priv, drm, args);
}

static const struct drm_driver adp_driver = {
	.driver_features = DRIVER_GEM | DRIVER_MODESET | DRIVER_ATOMIC,
	.fops = &adp_fops,
	DRM_GEM_DMA_DRIVER_OPS_VMAP_WITH_DUMB_CREATE(adp_drm_gem_dumb_create),
	.name = "adp",
	.desc = "Apple Display Pipe DRM Driver",
	.major = 0,
	.minor = 1,
};

struct adp_drv_private {
	struct drm_device drm;
	struct drm_crtc crtc;
	struct drm_encoder *encoder;
	struct drm_connector *connector;
	struct drm_bridge *next_bridge;
	void __iomem *be;
	void __iomem *fe;
	u32 *mask_buf;
	u64 mask_buf_size;
	dma_addr_t mask_iova;
	int be_irq;
	int fe_irq;
	spinlock_t irq_lock;
	struct drm_pending_vblank_event *event;
};

#define to_adp(x) container_of(x, struct adp_drv_private, drm)
#define crtc_to_adp(x) container_of(x, struct adp_drv_private, crtc)

static int adp_plane_atomic_check(struct drm_plane *plane,
				  struct drm_atomic_state *state)
{
	struct drm_plane_state *new_plane_state;
	struct drm_crtc_state *crtc_state;

	new_plane_state = drm_atomic_get_new_plane_state(state, plane);

	if (!new_plane_state->crtc)
		return 0;

	crtc_state = drm_atomic_get_crtc_state(state, new_plane_state->crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	return drm_atomic_helper_check_plane_state(new_plane_state,
						   crtc_state,
						   DRM_PLANE_NO_SCALING,
						   DRM_PLANE_NO_SCALING,
						   true, true);
}

static void adp_plane_atomic_update(struct drm_plane *plane,
				    struct drm_atomic_state *state)
{
	struct adp_drv_private *adp;
	struct drm_rect src_rect;
	struct drm_gem_dma_object *obj;
	struct drm_framebuffer *fb;
	struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state, plane);
	u32 src_pos, src_size, dst_pos, dst_size;

	if (!plane || !new_state)
		return;

	fb = new_state->fb;
	if (!fb)
		return;
	adp = to_adp(plane->dev);

	drm_rect_fp_to_int(&src_rect, &new_state->src);
	src_pos = src_rect.x1 << 16 | src_rect.y1;
	dst_pos = new_state->dst.x1 << 16 | new_state->dst.y1;
	src_size = drm_rect_width(&src_rect) << 16 | drm_rect_height(&src_rect);
	dst_size = drm_rect_width(&new_state->dst) << 16 |
		drm_rect_height(&new_state->dst);
	writel(src_pos, adp->be + ADBE_SRC_START);
	writel(src_size, adp->be + ADBE_SRC_SIZE);
	writel(dst_pos, adp->be + ADBE_DST_START);
	writel(dst_size, adp->be + ADBE_DST_SIZE);
	writel(fb->pitches[0], adp->be + ADBE_STRIDE);
	obj = drm_fb_dma_get_gem_obj(fb, 0);
	if (obj)
		writel(obj->dma_addr + fb->offsets[0], adp->be + ADBE_FB_BASE);

	writel(BIT(0), adp->be + ADBE_LAYER_EN1);
	writel(BIT(0), adp->be + ADBE_LAYER_EN2);
	writel(BIT(0), adp->be + ADBE_LAYER_EN3);
	writel(BIT(0), adp->be + ADBE_LAYER_EN4);
	writel(ADBE_SCALE_CTL_BYPASS, adp->be + ADBE_SCALE_CTL);
	writel(ADBE_LAYER_CTL_ENABLE | BIT(0), adp->be + ADBE_LAYER_CTL);
	writel(ADBE_PIX_FMT_XRGB32, adp->be + ADBE_PIX_FMT);
}

static void adp_plane_atomic_disable(struct drm_plane *plane,
				     struct drm_atomic_state *state)
{
	struct adp_drv_private *adp = to_adp(plane->dev);

	writel(0x0, adp->be + ADBE_LAYER_EN1);
	writel(0x0, adp->be + ADBE_LAYER_EN2);
	writel(0x0, adp->be + ADBE_LAYER_EN3);
	writel(0x0, adp->be + ADBE_LAYER_EN4);
	writel(ADBE_LAYER_CTL_ENABLE, adp->be + ADBE_LAYER_CTL);
}

static const struct drm_plane_helper_funcs adp_plane_helper_funcs = {
	.atomic_check = adp_plane_atomic_check,
	.atomic_update = adp_plane_atomic_update,
	.atomic_disable = adp_plane_atomic_disable,
	DRM_GEM_SHADOW_PLANE_HELPER_FUNCS
};

static const struct drm_plane_funcs adp_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	DRM_GEM_SHADOW_PLANE_FUNCS
};

static const u32 plane_formats[] = {
	DRM_FORMAT_XRGB8888,
};

#define ALL_CRTCS 1

static struct drm_plane *adp_plane_new(struct adp_drv_private *adp)
{
	struct drm_device *drm = &adp->drm;
	struct drm_plane *plane;

	plane = __drmm_universal_plane_alloc(drm, sizeof(struct drm_plane), 0,
					     ALL_CRTCS, &adp_plane_funcs,
					     plane_formats, ARRAY_SIZE(plane_formats),
					     NULL, DRM_PLANE_TYPE_PRIMARY, "plane");
	if (IS_ERR(plane)) {
		drm_err(drm, "failed to allocate plane");
		return plane;
	}

	drm_plane_helper_add(plane, &adp_plane_helper_funcs);
	return plane;
}

static void adp_enable_vblank(struct adp_drv_private *adp)
{
	u32 cur_ctrl;

	writel(ADP_INT_STATUS_INT_MASK, adp->fe + ADP_INT_STATUS);

	cur_ctrl = readl(adp->fe + ADP_CTRL);
	writel(cur_ctrl | ADP_CTRL_VBLANK_ON, adp->fe + ADP_CTRL);
}

static int adp_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct adp_drv_private *adp = to_adp(dev);

	adp_enable_vblank(adp);

	return 0;
}

static void adp_disable_vblank(struct adp_drv_private *adp)
{
	u32 cur_ctrl;

	cur_ctrl = readl(adp->fe + ADP_CTRL);
	writel(cur_ctrl & ~ADP_CTRL_VBLANK_ON, adp->fe + ADP_CTRL);
	writel(ADP_INT_STATUS_INT_MASK, adp->fe + ADP_INT_STATUS);
}

static void adp_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct adp_drv_private *adp = to_adp(dev);

	adp_disable_vblank(adp);
}

static void adp_crtc_atomic_enable(struct drm_crtc *crtc,
				   struct drm_atomic_state *state)
{
	struct adp_drv_private *adp = crtc_to_adp(crtc);

	writel(BIT(0), adp->be + ADBE_BLEND_EN2);
	writel(BIT(4), adp->be + ADBE_BLEND_EN1);
	writel(BIT(0), adp->be + ADBE_BLEND_EN3);
	writel(BIT(0), adp->be + ADBE_BLEND_BYPASS);
	writel(BIT(0), adp->be + ADBE_BLEND_EN4);
	/* balance the drm_crtc_vblank_off() in adp_crtc_atomic_disable() */
	drm_crtc_vblank_on(crtc);
}

static void adp_crtc_atomic_disable(struct drm_crtc *crtc,
				    struct drm_atomic_state *state)
{
	struct adp_drv_private *adp = crtc_to_adp(crtc);
	struct drm_crtc_state *old_state = drm_atomic_get_old_crtc_state(state, crtc);

	drm_atomic_helper_disable_planes_on_crtc(old_state, false);

	writel(0x0, adp->be + ADBE_BLEND_EN2);
	writel(0x0, adp->be + ADBE_BLEND_EN1);
	writel(0x0, adp->be + ADBE_BLEND_EN3);
	writel(0x0, adp->be + ADBE_BLEND_BYPASS);
	writel(0x0, adp->be + ADBE_BLEND_EN4);
	drm_crtc_vblank_off(crtc);
}

static void adp_crtc_atomic_flush(struct drm_crtc *crtc,
				  struct drm_atomic_state *state)
{
	u32 frame_num = 1;
	struct adp_drv_private *adp = crtc_to_adp(crtc);
	struct drm_crtc_state *new_state = drm_atomic_get_new_crtc_state(state, crtc);
	u64 new_size = ALIGN(new_state->mode.hdisplay *
			     new_state->mode.vdisplay * 4, PAGE_SIZE);

	if (new_size != adp->mask_buf_size) {
		if (adp->mask_buf)
			dma_free_coherent(crtc->dev->dev, adp->mask_buf_size,
					  adp->mask_buf, adp->mask_iova);
		adp->mask_buf = NULL;
		if (new_size != 0) {
			adp->mask_buf = dma_alloc_coherent(crtc->dev->dev, new_size,
							   &adp->mask_iova, GFP_KERNEL);
			/* only program the mask buffer if the allocation succeeded */
			if (adp->mask_buf) {
				memset(adp->mask_buf, 0xFF, new_size);
				writel(adp->mask_iova, adp->be + ADBE_MASK_BUF);
			}
		}
		adp->mask_buf_size = adp->mask_buf ? new_size : 0;
	}
	writel(ADBE_FIFO_SYNC | frame_num, adp->be + ADBE_FIFO);
	/* FIXME: use adbe flush interrupt */
	spin_lock_irq(&crtc->dev->event_lock);
	if (crtc->state->event) {
		drm_crtc_vblank_get(crtc);
		adp->event = crtc->state->event;
	}
	crtc->state->event = NULL;
	spin_unlock_irq(&crtc->dev->event_lock);
}

static const struct drm_crtc_funcs adp_crtc_funcs = {
	.destroy = drm_crtc_cleanup,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.reset = drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank = adp_crtc_enable_vblank,
	.disable_vblank = adp_crtc_disable_vblank,
};

static const struct drm_crtc_helper_funcs adp_crtc_helper_funcs = {
	.atomic_enable = adp_crtc_atomic_enable,
	.atomic_disable = adp_crtc_atomic_disable,
	.atomic_flush = adp_crtc_atomic_flush,
};

static int adp_setup_crtc(struct adp_drv_private *adp)
{
	struct drm_device *drm = &adp->drm;
	struct drm_plane *primary;
	int ret;

	primary = adp_plane_new(adp);
	if (IS_ERR(primary))
		return PTR_ERR(primary);

	ret = drm_crtc_init_with_planes(drm, &adp->crtc, primary,
					NULL, &adp_crtc_funcs, NULL);
	if (ret)
		return ret;

	drm_crtc_helper_add(&adp->crtc, &adp_crtc_helper_funcs);
	return 0;
}

static const struct drm_mode_config_funcs adp_mode_config_funcs = {
	.fb_create = drm_gem_fb_create_with_dirty,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = drm_atomic_helper_commit,
};

static int adp_setup_mode_config(struct adp_drv_private *adp)
{
	struct drm_device *drm = &adp->drm;
	int ret;
	u32 size;

	ret = drmm_mode_config_init(drm);
	if (ret)
		return ret;

	/*
	 * Query the screen size and restrict the frame buffer size to the
	 * screen size aligned to the next multiple of 64. This is not strictly
	 * necessary but can be used as a simple check for non-desktop devices.
	 * Xorg's modesetting driver does not care about the connector
	 * "non-desktop" property. The max frame buffer width or height can be
	 * easily checked and a device can be rejected if, for example, the max
	 * width/height is smaller than 120.
	 * Any touchbar daemon is not limited by this small framebuffer size.
	 */
	size = readl(adp->fe + ADP_SCREEN_SIZE);

	drm->mode_config.min_width = 32;
	drm->mode_config.min_height = 32;
	drm->mode_config.max_width = ALIGN(FIELD_GET(ADP_SCREEN_HSIZE, size), 64);
	drm->mode_config.max_height = ALIGN(FIELD_GET(ADP_SCREEN_VSIZE, size), 64);
	drm->mode_config.preferred_depth = 24;
	drm->mode_config.prefer_shadow = 0;
	drm->mode_config.funcs = &adp_mode_config_funcs;

	ret = adp_setup_crtc(adp);
	if (ret) {
		drm_err(drm, "failed to create crtc");
		return ret;
	}

	adp->encoder = drmm_plain_encoder_alloc(drm, NULL, DRM_MODE_ENCODER_DSI, NULL);
	if (IS_ERR(adp->encoder)) {
		drm_err(drm, "failed to init encoder");
		return PTR_ERR(adp->encoder);
	}
	adp->encoder->possible_crtcs = ALL_CRTCS;

	ret = drm_bridge_attach(adp->encoder, adp->next_bridge, NULL,
				DRM_BRIDGE_ATTACH_NO_CONNECTOR);
	if (ret) {
		drm_err(drm, "failed to init bridge chain");
		return ret;
	}

	adp->connector = drm_bridge_connector_init(drm, adp->encoder);
	if (IS_ERR(adp->connector))
		return PTR_ERR(adp->connector);

	drm_connector_attach_encoder(adp->connector, adp->encoder);

	ret = drm_vblank_init(drm, drm->mode_config.num_crtc);
	if (ret < 0) {
		drm_err(drm, "failed to initialize vblank");
		return ret;
	}

	drm_mode_config_reset(drm);

	return 0;
}

static int adp_parse_of(struct platform_device *pdev, struct adp_drv_private *adp)
{
	struct device *dev = &pdev->dev;

	adp->be = devm_platform_ioremap_resource_byname(pdev, "be");
	if (IS_ERR(adp->be)) {
		dev_err(dev, "failed to map display backend mmio");
		return PTR_ERR(adp->be);
	}

	adp->fe = devm_platform_ioremap_resource_byname(pdev, "fe");
	if (IS_ERR(adp->fe)) {
		dev_err(dev, "failed to map display pipe mmio");
		return PTR_ERR(adp->fe);
	}

	adp->be_irq = platform_get_irq_byname(pdev, "be");
	if (adp->be_irq < 0)
		return adp->be_irq;

	adp->fe_irq = platform_get_irq_byname(pdev, "fe");
	if (adp->fe_irq < 0)
		return adp->fe_irq;

	return 0;
}

static irqreturn_t adp_fe_irq(int irq, void *arg)
{
	struct adp_drv_private *adp = (struct adp_drv_private *)arg;
	u32 int_status;
	u32 int_ctl;

	spin_lock(&adp->irq_lock);

	int_status = readl(adp->fe + ADP_INT_STATUS);
	if (int_status & ADP_INT_STATUS_VBLANK) {
		drm_crtc_handle_vblank(&adp->crtc);
		spin_lock(&adp->crtc.dev->event_lock);
		if (adp->event) {
			int_ctl = readl(adp->fe + ADP_CTRL);
			if ((int_ctl & 0xF00) == 0x600) {
				drm_crtc_send_vblank_event(&adp->crtc, adp->event);
				adp->event = NULL;
				drm_crtc_vblank_put(&adp->crtc);
			}
		}
		spin_unlock(&adp->crtc.dev->event_lock);
	}

	writel(int_status, adp->fe + ADP_INT_STATUS);

	spin_unlock(&adp->irq_lock);

	return IRQ_HANDLED;
}

static int adp_drm_bind(struct device *dev)
{
	struct drm_device *drm = dev_get_drvdata(dev);
	struct adp_drv_private *adp = to_adp(drm);
	int err;

	adp_disable_vblank(adp);
	writel(ADP_CTRL_FIFO_ON | ADP_CTRL_VBLANK_ON, adp->fe + ADP_CTRL);

	adp->next_bridge = drmm_of_get_bridge(&adp->drm, dev->of_node, 0, 0);
	if (IS_ERR(adp->next_bridge)) {
		dev_err(dev, "failed to find next bridge");
		return PTR_ERR(adp->next_bridge);
	}

	err = adp_setup_mode_config(adp);
	if (err < 0)
		return err;

	err = request_irq(adp->fe_irq, adp_fe_irq, 0, "adp-fe", adp);
	if (err)
		return err;

	err = drm_dev_register(&adp->drm, 0);
	if (err) {
		/* don't leak the display pipe IRQ if registration fails */
		free_irq(adp->fe_irq, adp);
		return err;
	}

	return 0;
}

static void adp_drm_unbind(struct device *dev)
{
	struct drm_device *drm = dev_get_drvdata(dev);
	struct adp_drv_private *adp = to_adp(drm);

	drm_dev_unregister(drm);
	drm_atomic_helper_shutdown(drm);
	free_irq(adp->fe_irq, adp);
}

static const struct component_master_ops adp_master_ops = {
	.bind = adp_drm_bind,
	.unbind = adp_drm_unbind,
};

static int compare_dev(struct device *dev, void *data)
{
	return dev->of_node == data;
}

static int adp_probe(struct platform_device *pdev)
{
	struct device_node *port;
	struct component_match *match = NULL;
	struct adp_drv_private *adp;
	int err;

	adp = devm_drm_dev_alloc(&pdev->dev, &adp_driver, struct adp_drv_private, drm);
	if (IS_ERR(adp))
		return PTR_ERR(adp);

	spin_lock_init(&adp->irq_lock);

	dev_set_drvdata(&pdev->dev, &adp->drm);

	err = adp_parse_of(pdev, adp);
	if (err < 0)
		return err;

	port = of_graph_get_remote_node(pdev->dev.of_node, 0, 0);
	if (!port)
		return -ENODEV;

	drm_of_component_match_add(&pdev->dev, &match, compare_dev, port);
	of_node_put(port);

	return component_master_add_with_match(&pdev->dev, &adp_master_ops, match);
}

static void adp_remove(struct platform_device *pdev)
{
	component_master_del(&pdev->dev, &adp_master_ops);
	dev_set_drvdata(&pdev->dev, NULL);
}

static const struct of_device_id adp_of_match[] = {
	{ .compatible = "apple,h7-display-pipe", },
	{ },
};
MODULE_DEVICE_TABLE(of, adp_of_match);

static struct platform_driver adp_platform_driver = {
	.driver = {
		.name = "adp",
		.of_match_table = adp_of_match,
	},
	.probe = adp_probe,
	.remove = adp_remove,
};

module_platform_driver(adp_platform_driver);

MODULE_DESCRIPTION("Apple Display Pipe DRM driver");
MODULE_LICENSE("GPL");