xref: /linux/drivers/gpu/drm/adp/adp_drv.c (revision 815e260a18a3af4dab59025ee99a7156c0e8b5e0)
// SPDX-License-Identifier: GPL-2.0-only

#include <linux/bitfield.h>
#include <linux/component.h>
#include <linux/iopoll.h>
#include <linux/of.h>
#include <linux/platform_device.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_bridge.h>
#include <drm/drm_bridge_connector.h>
#include <drm/drm_drv.h>
#include <drm/drm_fb_dma_helper.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_atomic_helper.h>
#include <drm/drm_gem_dma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_of.h>
#include <drm/drm_print.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_vblank.h>

#define ADP_INT_STATUS 0x34
#define ADP_INT_STATUS_INT_MASK 0x7
#define ADP_INT_STATUS_VBLANK 0x1
#define ADP_CTRL 0x100
#define ADP_CTRL_VBLANK_ON 0x12
#define ADP_CTRL_FIFO_ON 0x601
#define ADP_SCREEN_SIZE 0x0c
#define ADP_SCREEN_HSIZE GENMASK(15, 0)
#define ADP_SCREEN_VSIZE GENMASK(31, 16)

#define ADBE_FIFO 0x10c0
#define ADBE_FIFO_SYNC 0xc0000000

#define ADBE_BLEND_BYPASS 0x2020
#define ADBE_BLEND_EN1 0x2028
#define ADBE_BLEND_EN2 0x2074
#define ADBE_BLEND_EN3 0x202c
#define ADBE_BLEND_EN4 0x2034
#define ADBE_MASK_BUF 0x2200

#define ADBE_SRC_START 0x4040
#define ADBE_SRC_SIZE 0x4048
#define ADBE_DST_START 0x4050
#define ADBE_DST_SIZE 0x4054
#define ADBE_STRIDE 0x4038
#define ADBE_FB_BASE 0x4030

#define ADBE_LAYER_EN1 0x4020
#define ADBE_LAYER_EN2 0x4068
#define ADBE_LAYER_EN3 0x40b4
#define ADBE_LAYER_EN4 0x40f4
#define ADBE_SCALE_CTL 0x40ac
#define ADBE_SCALE_CTL_BYPASS 0x100000

#define ADBE_LAYER_CTL 0x1038
#define ADBE_LAYER_CTL_ENABLE 0x10000

#define ADBE_PIX_FMT 0x402c
#define ADBE_PIX_FMT_XRGB32 0x53e4001

static int adp_open(struct inode *inode, struct file *filp)
{
	/*
	 * Xorg's modesetting driver does not check the "non-desktop" connector
	 * property and keeps the device open and locked. If the touchbar daemon
	 * opens the device first, modesetting breaks the whole X session.
	 * As a workaround, simply refuse to open the device for X11 server
	 * processes.
	 */
	if (current->comm[0] == 'X')
		return -EBUSY;

	return drm_open(inode, filp);
}

static const struct file_operations adp_fops = {
	.owner          = THIS_MODULE,
	.open           = adp_open,
	.release        = drm_release,
	.unlocked_ioctl = drm_ioctl,
	.compat_ioctl   = drm_compat_ioctl,
	.poll           = drm_poll,
	.read           = drm_read,
	.llseek         = noop_llseek,
	.mmap           = drm_gem_mmap,
	.fop_flags      = FOP_UNSIGNED_OFFSET,
	DRM_GEM_DMA_UNMAPPED_AREA_FOPS
};

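/*
 * Dumb buffers are over-allocated: the height is rounded up to a multiple of
 * 64 lines before the buffer size is computed, presumably to satisfy an
 * alignment requirement of the display hardware.
 */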
static int adp_drm_gem_dumb_create(struct drm_file *file_priv,
					struct drm_device *drm,
					struct drm_mode_create_dumb *args)
{
	args->height = ALIGN(args->height, 64);
	args->size = args->pitch * args->height;

	return drm_gem_dma_dumb_create_internal(file_priv, drm, args);
}

static const struct drm_driver adp_driver = {
	.driver_features = DRIVER_GEM | DRIVER_MODESET | DRIVER_ATOMIC,
	.fops = &adp_fops,
	DRM_GEM_DMA_DRIVER_OPS_VMAP_WITH_DUMB_CREATE(adp_drm_gem_dumb_create),
	.name = "adp",
	.desc = "Apple Display Pipe DRM Driver",
	.major = 0,
	.minor = 1,
};

struct adp_drv_private {
	struct drm_device drm;
	struct drm_crtc crtc;
	struct drm_encoder *encoder;
	struct drm_connector *connector;
	struct drm_bridge *next_bridge;
	void __iomem *be;
	void __iomem *fe;
	u32 *mask_buf;
	u64 mask_buf_size;
	dma_addr_t mask_iova;
	int be_irq;
	int fe_irq;
	struct drm_pending_vblank_event *event;
};

#define to_adp(x) container_of(x, struct adp_drv_private, drm)
#define crtc_to_adp(x) container_of(x, struct adp_drv_private, crtc)

static int adp_plane_atomic_check(struct drm_plane *plane,
				    struct drm_atomic_state *state)
{
	struct drm_plane_state *new_plane_state;
	struct drm_crtc_state *crtc_state;

	new_plane_state = drm_atomic_get_new_plane_state(state, plane);

	if (!new_plane_state->crtc)
		return 0;

	crtc_state = drm_atomic_get_crtc_state(state, new_plane_state->crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	return drm_atomic_helper_check_plane_state(new_plane_state,
						   crtc_state,
						   DRM_PLANE_NO_SCALING,
						   DRM_PLANE_NO_SCALING,
						   true, true);
}

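/*
 * Program the single hardware layer from the new plane state. Source and
 * destination positions/sizes appear to be packed as (x << 16 | y) and
 * (width << 16 | height) respectively; the layer is then enabled with the
 * scaler bypassed and the pixel format fixed to XRGB8888.
 */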
static void adp_plane_atomic_update(struct drm_plane *plane,
				    struct drm_atomic_state *state)
{
	struct adp_drv_private *adp;
	struct drm_rect src_rect;
	struct drm_gem_dma_object *obj;
	struct drm_framebuffer *fb;
	struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state, plane);
	u32 src_pos, src_size, dst_pos, dst_size;

	if (!plane || !new_state)
		return;

	fb = new_state->fb;
	if (!fb)
		return;
	adp = to_adp(plane->dev);

	drm_rect_fp_to_int(&src_rect, &new_state->src);
	src_pos = src_rect.x1 << 16 | src_rect.y1;
	dst_pos = new_state->dst.x1 << 16 | new_state->dst.y1;
	src_size = drm_rect_width(&src_rect) << 16 | drm_rect_height(&src_rect);
	dst_size = drm_rect_width(&new_state->dst) << 16 |
		drm_rect_height(&new_state->dst);
	writel(src_pos, adp->be + ADBE_SRC_START);
	writel(src_size, adp->be + ADBE_SRC_SIZE);
	writel(dst_pos, adp->be + ADBE_DST_START);
	writel(dst_size, adp->be + ADBE_DST_SIZE);
	writel(fb->pitches[0], adp->be + ADBE_STRIDE);
	obj = drm_fb_dma_get_gem_obj(fb, 0);
	if (obj)
		writel(obj->dma_addr + fb->offsets[0], adp->be + ADBE_FB_BASE);

	writel(BIT(0), adp->be + ADBE_LAYER_EN1);
	writel(BIT(0), adp->be + ADBE_LAYER_EN2);
	writel(BIT(0), adp->be + ADBE_LAYER_EN3);
	writel(BIT(0), adp->be + ADBE_LAYER_EN4);
	writel(ADBE_SCALE_CTL_BYPASS, adp->be + ADBE_SCALE_CTL);
	writel(ADBE_LAYER_CTL_ENABLE | BIT(0), adp->be + ADBE_LAYER_CTL);
	writel(ADBE_PIX_FMT_XRGB32, adp->be + ADBE_PIX_FMT);
}

static void adp_plane_atomic_disable(struct drm_plane *plane,
				     struct drm_atomic_state *state)
{
	struct adp_drv_private *adp = to_adp(plane->dev);

	writel(0x0, adp->be + ADBE_LAYER_EN1);
	writel(0x0, adp->be + ADBE_LAYER_EN2);
	writel(0x0, adp->be + ADBE_LAYER_EN3);
	writel(0x0, adp->be + ADBE_LAYER_EN4);
	writel(ADBE_LAYER_CTL_ENABLE, adp->be + ADBE_LAYER_CTL);
}

static const struct drm_plane_helper_funcs adp_plane_helper_funcs = {
	.atomic_check = adp_plane_atomic_check,
	.atomic_update = adp_plane_atomic_update,
	.atomic_disable = adp_plane_atomic_disable,
	DRM_GEM_SHADOW_PLANE_HELPER_FUNCS
};

static const struct drm_plane_funcs adp_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	DRM_GEM_SHADOW_PLANE_FUNCS
};

static const u32 plane_formats[] = {
	DRM_FORMAT_XRGB8888,
};

#define ALL_CRTCS 1

static struct drm_plane *adp_plane_new(struct adp_drv_private *adp)
{
	struct drm_device *drm = &adp->drm;
	struct drm_plane *plane;

	plane = __drmm_universal_plane_alloc(drm, sizeof(struct drm_plane), 0,
					     ALL_CRTCS, &adp_plane_funcs,
					     plane_formats, ARRAY_SIZE(plane_formats),
					     NULL, DRM_PLANE_TYPE_PRIMARY, "plane");
	if (IS_ERR(plane)) {
		drm_err(drm, "failed to allocate plane");
		return plane;
	}

	drm_plane_helper_add(plane, &adp_plane_helper_funcs);
	return plane;
}

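/*
 * The ADP_INT_STATUS register appears to be write-1-to-clear; pending status
 * bits are cleared when vblank interrupts are turned on or off by toggling
 * ADP_CTRL_VBLANK_ON in the control register.
 */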
static void adp_enable_vblank(struct adp_drv_private *adp)
{
	u32 cur_ctrl;

	writel(ADP_INT_STATUS_INT_MASK, adp->fe + ADP_INT_STATUS);

	cur_ctrl = readl(adp->fe + ADP_CTRL);
	writel(cur_ctrl | ADP_CTRL_VBLANK_ON, adp->fe + ADP_CTRL);
}

static int adp_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct adp_drv_private *adp = to_adp(dev);

	adp_enable_vblank(adp);

	return 0;
}

static void adp_disable_vblank(struct adp_drv_private *adp)
{
	u32 cur_ctrl;

	cur_ctrl = readl(adp->fe + ADP_CTRL);
	writel(cur_ctrl & ~ADP_CTRL_VBLANK_ON, adp->fe + ADP_CTRL);
	writel(ADP_INT_STATUS_INT_MASK, adp->fe + ADP_INT_STATUS);
}

static void adp_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct adp_drv_private *adp = to_adp(dev);

	adp_disable_vblank(adp);
}

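/*
 * The exact semantics of the individual ADBE_BLEND_* bits are not documented
 * here; atomic_enable sets up the blender before turning vblank on, and
 * atomic_disable clears the same registers after disabling the planes.
 */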
static void adp_crtc_atomic_enable(struct drm_crtc *crtc,
				   struct drm_atomic_state *state)
{
	struct adp_drv_private *adp = crtc_to_adp(crtc);

	writel(BIT(0), adp->be + ADBE_BLEND_EN2);
	writel(BIT(4), adp->be + ADBE_BLEND_EN1);
	writel(BIT(0), adp->be + ADBE_BLEND_EN3);
	writel(BIT(0), adp->be + ADBE_BLEND_BYPASS);
	writel(BIT(0), adp->be + ADBE_BLEND_EN4);
	drm_crtc_vblank_on(crtc);
}

static void adp_crtc_atomic_disable(struct drm_crtc *crtc,
				    struct drm_atomic_state *state)
{
	struct adp_drv_private *adp = crtc_to_adp(crtc);
	struct drm_crtc_state *old_state = drm_atomic_get_old_crtc_state(state, crtc);

	drm_atomic_helper_disable_planes_on_crtc(old_state, false);

	writel(0x0, adp->be + ADBE_BLEND_EN2);
	writel(0x0, adp->be + ADBE_BLEND_EN1);
	writel(0x0, adp->be + ADBE_BLEND_EN3);
	writel(0x0, adp->be + ADBE_BLEND_BYPASS);
	writel(0x0, adp->be + ADBE_BLEND_EN4);
	drm_crtc_vblank_off(crtc);
}

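/*
 * On a mode size change, reallocate the mask buffer (4 bytes per pixel,
 * rounded up to a page) and fill it with 0xff, which presumably leaves the
 * whole plane visible, then point the hardware at it. A write of
 * ADBE_FIFO_SYNC appears to kick off the frame; any pending vblank event is
 * completed later from the vblank interrupt handler.
 */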
static void adp_crtc_atomic_flush(struct drm_crtc *crtc,
				  struct drm_atomic_state *state)
{
	u32 frame_num = 1;
	unsigned long flags;
	struct adp_drv_private *adp = crtc_to_adp(crtc);
	struct drm_crtc_state *new_state = drm_atomic_get_new_crtc_state(state, crtc);
	u64 new_size = ALIGN(new_state->mode.hdisplay *
			     new_state->mode.vdisplay * 4, PAGE_SIZE);

	if (new_size != adp->mask_buf_size) {
		if (adp->mask_buf)
			dma_free_coherent(crtc->dev->dev, adp->mask_buf_size,
					  adp->mask_buf, adp->mask_iova);
		adp->mask_buf = NULL;
		if (new_size != 0) {
			adp->mask_buf = dma_alloc_coherent(crtc->dev->dev, new_size,
							   &adp->mask_iova, GFP_KERNEL);
			if (adp->mask_buf) {
				memset(adp->mask_buf, 0xFF, new_size);
				writel(adp->mask_iova, adp->be + ADBE_MASK_BUF);
			} else {
				/* Allocation failed; retry on the next flush. */
				new_size = 0;
			}
		}
		adp->mask_buf_size = new_size;
	}
	writel(ADBE_FIFO_SYNC | frame_num, adp->be + ADBE_FIFO);
	/* FIXME: use adbe flush interrupt */
	if (crtc->state->event) {
		struct drm_pending_vblank_event *event = crtc->state->event;

		crtc->state->event = NULL;
		spin_lock_irqsave(&crtc->dev->event_lock, flags);

		if (drm_crtc_vblank_get(crtc) != 0)
			drm_crtc_send_vblank_event(crtc, event);
		else
			adp->event = event;

		spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
	}
}

static const struct drm_crtc_funcs adp_crtc_funcs = {
	.destroy = drm_crtc_cleanup,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.reset = drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank = adp_crtc_enable_vblank,
	.disable_vblank = adp_crtc_disable_vblank,
};

static const struct drm_crtc_helper_funcs adp_crtc_helper_funcs = {
	.atomic_enable = adp_crtc_atomic_enable,
	.atomic_disable = adp_crtc_atomic_disable,
	.atomic_flush = adp_crtc_atomic_flush,
};

static int adp_setup_crtc(struct adp_drv_private *adp)
{
	struct drm_device *drm = &adp->drm;
	struct drm_plane *primary;
	int ret;

	primary = adp_plane_new(adp);
	if (IS_ERR(primary))
		return PTR_ERR(primary);

	ret = drm_crtc_init_with_planes(drm, &adp->crtc, primary,
					NULL, &adp_crtc_funcs, NULL);
	if (ret)
		return ret;

	drm_crtc_helper_add(&adp->crtc, &adp_crtc_helper_funcs);
	return 0;
}

static const struct drm_mode_config_funcs adp_mode_config_funcs = {
	.fb_create = drm_gem_fb_create_with_dirty,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = drm_atomic_helper_commit,
};

static int adp_setup_mode_config(struct adp_drv_private *adp)
{
	struct drm_device *drm = &adp->drm;
	int ret;
	u32 size;

	ret = drmm_mode_config_init(drm);
	if (ret)
		return ret;

	/*
	 * Query the screen size and restrict the frame buffer size to the
	 * screen size aligned up to the next multiple of 64. This is not
	 * strictly necessary, but it serves as a simple check for non-desktop
	 * devices: Xorg's modesetting driver does not care about the connector
	 * "non-desktop" property, but the maximum frame buffer width or height
	 * is easy to check, and a device can be rejected if, for example, the
	 * maximum width/height is smaller than 120.
	 * A touchbar daemon is not limited by this small frame buffer size.
	 */
	size = readl(adp->fe + ADP_SCREEN_SIZE);

	drm->mode_config.min_width = 32;
	drm->mode_config.min_height = 32;
	drm->mode_config.max_width = ALIGN(FIELD_GET(ADP_SCREEN_HSIZE, size), 64);
	drm->mode_config.max_height = ALIGN(FIELD_GET(ADP_SCREEN_VSIZE, size), 64);
	drm->mode_config.preferred_depth = 24;
	drm->mode_config.prefer_shadow = 0;
	drm->mode_config.funcs = &adp_mode_config_funcs;

	ret = adp_setup_crtc(adp);
	if (ret) {
		drm_err(drm, "failed to create crtc");
		return ret;
	}

	adp->encoder = drmm_plain_encoder_alloc(drm, NULL, DRM_MODE_ENCODER_DSI, NULL);
	if (IS_ERR(adp->encoder)) {
		drm_err(drm, "failed to init encoder");
		return PTR_ERR(adp->encoder);
	}
	adp->encoder->possible_crtcs = ALL_CRTCS;

	ret = drm_bridge_attach(adp->encoder, adp->next_bridge, NULL,
				DRM_BRIDGE_ATTACH_NO_CONNECTOR);
	if (ret) {
		drm_err(drm, "failed to init bridge chain");
		return ret;
	}

	adp->connector = drm_bridge_connector_init(drm, adp->encoder);
	if (IS_ERR(adp->connector))
		return PTR_ERR(adp->connector);

	drm_connector_attach_encoder(adp->connector, adp->encoder);

	ret = drm_vblank_init(drm, drm->mode_config.num_crtc);
	if (ret < 0) {
		drm_err(drm, "failed to initialize vblank");
		return ret;
	}

	drm_mode_config_reset(drm);

	return 0;
}

static int adp_parse_of(struct platform_device *pdev, struct adp_drv_private *adp)
{
	struct device *dev = &pdev->dev;

	adp->be = devm_platform_ioremap_resource_byname(pdev, "be");
	if (IS_ERR(adp->be)) {
		dev_err(dev, "failed to map display backend mmio");
		return PTR_ERR(adp->be);
	}

	adp->fe = devm_platform_ioremap_resource_byname(pdev, "fe");
	if (IS_ERR(adp->fe)) {
		dev_err(dev, "failed to map display pipe mmio");
		return PTR_ERR(adp->fe);
	}

	adp->be_irq = platform_get_irq_byname(pdev, "be");
	if (adp->be_irq < 0)
		return adp->be_irq;

	adp->fe_irq = platform_get_irq_byname(pdev, "fe");
	if (adp->fe_irq < 0)
		return adp->fe_irq;

	return 0;
}

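/*
 * Front end interrupt handler: on a vblank interrupt, complete any pending
 * flip event, but only while the FIFO bits in ADP_CTRL still read back as the
 * FIFO-on value (the 0xf00/0x600 check below). The handled status bits are
 * then written back, presumably to acknowledge the interrupt.
 */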
static irqreturn_t adp_fe_irq(int irq, void *arg)
{
	struct adp_drv_private *adp = (struct adp_drv_private *)arg;
	u32 int_status;
	u32 int_ctl;

	int_status = readl(adp->fe + ADP_INT_STATUS);
	if (int_status & ADP_INT_STATUS_VBLANK) {
		drm_crtc_handle_vblank(&adp->crtc);
		spin_lock(&adp->crtc.dev->event_lock);
		if (adp->event) {
			int_ctl = readl(adp->fe + ADP_CTRL);
			if ((int_ctl & 0xF00) == 0x600) {
				drm_crtc_send_vblank_event(&adp->crtc, adp->event);
				adp->event = NULL;
				drm_crtc_vblank_put(&adp->crtc);
			}
		}
		spin_unlock(&adp->crtc.dev->event_lock);
	}

	writel(int_status, adp->fe + ADP_INT_STATUS);

	return IRQ_HANDLED;
}

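/*
 * Component master bind: write ADP_CTRL_FIFO_ON to the front end control
 * register, look up the next bridge from the OF graph, set up the mode
 * configuration, request the front end interrupt and finally register the
 * DRM device.
 */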
static int adp_drm_bind(struct device *dev)
{
	struct drm_device *drm = dev_get_drvdata(dev);
	struct adp_drv_private *adp = to_adp(drm);
	int err;

	writel(ADP_CTRL_FIFO_ON, adp->fe + ADP_CTRL);

	adp->next_bridge = drmm_of_get_bridge(&adp->drm, dev->of_node, 0, 0);
	if (IS_ERR(adp->next_bridge)) {
		dev_err(dev, "failed to find next bridge");
		return PTR_ERR(adp->next_bridge);
	}

	err = adp_setup_mode_config(adp);
	if (err < 0)
		return err;

	err = request_irq(adp->fe_irq, adp_fe_irq, 0, "adp-fe", adp);
	if (err)
		return err;

	err = drm_dev_register(&adp->drm, 0);
	if (err) {
		free_irq(adp->fe_irq, adp);
		return err;
	}

	return 0;
}

static void adp_drm_unbind(struct device *dev)
{
	struct drm_device *drm = dev_get_drvdata(dev);
	struct adp_drv_private *adp = to_adp(drm);

	drm_dev_unregister(drm);
	drm_atomic_helper_shutdown(drm);
	free_irq(adp->fe_irq, adp);
}

static const struct component_master_ops adp_master_ops = {
	.bind	= adp_drm_bind,
	.unbind = adp_drm_unbind,
};

static int compare_dev(struct device *dev, void *data)
{
	return dev->of_node == data;
}

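/*
 * The remote node on OF graph port 0 (presumably the DSI host, given the
 * DRM_MODE_ENCODER_DSI encoder above) is added as a component match; the DRM
 * device is only bound in adp_drm_bind() once that component has probed.
 */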
static int adp_probe(struct platform_device *pdev)
{
	struct device_node *port;
	struct component_match *match = NULL;
	struct adp_drv_private *adp;
	int err;

	adp = devm_drm_dev_alloc(&pdev->dev, &adp_driver, struct adp_drv_private, drm);
	if (IS_ERR(adp))
		return PTR_ERR(adp);

	dev_set_drvdata(&pdev->dev, &adp->drm);

	err = adp_parse_of(pdev, adp);
	if (err < 0)
		return err;

	port = of_graph_get_remote_node(pdev->dev.of_node, 0, 0);
	if (!port)
		return -ENODEV;

	drm_of_component_match_add(&pdev->dev, &match, compare_dev, port);
	of_node_put(port);

	return component_master_add_with_match(&pdev->dev, &adp_master_ops, match);
}

static void adp_remove(struct platform_device *pdev)
{
	component_master_del(&pdev->dev, &adp_master_ops);
	dev_set_drvdata(&pdev->dev, NULL);
}

static const struct of_device_id adp_of_match[] = {
	{ .compatible = "apple,h7-display-pipe", },
	{ },
};
MODULE_DEVICE_TABLE(of, adp_of_match);

static struct platform_driver adp_platform_driver = {
	.driver = {
		.name = "adp",
		.of_match_table = adp_of_match,
	},
	.probe = adp_probe,
	.remove = adp_remove,
};

module_platform_driver(adp_platform_driver);

MODULE_DESCRIPTION("Apple Display Pipe DRM driver");
MODULE_LICENSE("GPL");