xref: /linux/drivers/gpu/drm/gud/gud_drv.c (revision bf4afc53b77aeaa48b5409da5c8da6bb4eff7f43)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright 2020 Noralf Trønnes
4  */
5 
6 #include <linux/dma-buf.h>
7 #include <linux/dma-mapping.h>
8 #include <linux/lz4.h>
9 #include <linux/module.h>
10 #include <linux/platform_device.h>
11 #include <linux/string_helpers.h>
12 #include <linux/usb.h>
13 #include <linux/vmalloc.h>
14 #include <linux/workqueue.h>
15 
16 #include <drm/clients/drm_client_setup.h>
17 #include <drm/drm_atomic_helper.h>
18 #include <drm/drm_blend.h>
19 #include <drm/drm_crtc_helper.h>
20 #include <drm/drm_damage_helper.h>
21 #include <drm/drm_debugfs.h>
22 #include <drm/drm_drv.h>
23 #include <drm/drm_fbdev_shmem.h>
24 #include <drm/drm_fourcc.h>
25 #include <drm/drm_gem_atomic_helper.h>
26 #include <drm/drm_gem_framebuffer_helper.h>
27 #include <drm/drm_gem_shmem_helper.h>
28 #include <drm/drm_managed.h>
29 #include <drm/drm_print.h>
30 #include <drm/drm_probe_helper.h>
31 #include <drm/gud.h>
32 
33 #include "gud_internal.h"
34 
/*
 * 1 bit-per-pixel monochrome format: 8 horizontal pixels packed per byte
 * (block_w = 8, char_per_block = 1). Only used internally for transfers to
 * the device; never exposed to userspace (filtered out in gud_probe()).
 */
static const struct drm_format_info gud_drm_format_r1 = {
	.format = GUD_DRM_FORMAT_R1,
	.num_planes = 1,
	.char_per_block = { 1, 0, 0 },
	.block_w = { 8, 0, 0 },
	.block_h = { 1, 0, 0 },
	.hsub = 1,
	.vsub = 1,
};
45 
/*
 * 4 bits-per-pixel XRGB format: 2 horizontal pixels packed per byte
 * (block_w = 2, char_per_block = 1). Only used internally for transfers to
 * the device; never exposed to userspace (filtered out in gud_probe()).
 */
static const struct drm_format_info gud_drm_format_xrgb1111 = {
	.format = GUD_DRM_FORMAT_XRGB1111,
	.num_planes = 1,
	.char_per_block = { 1, 0, 0 },
	.block_w = { 2, 0, 0 },
	.block_h = { 1, 0, 0 },
	.hsub = 1,
	.vsub = 1,
};
55 
56 static int gud_usb_control_msg(struct usb_interface *intf, bool in,
57 			       u8 request, u16 value, void *buf, size_t len)
58 {
59 	u8 requesttype = USB_TYPE_VENDOR | USB_RECIP_INTERFACE;
60 	u8 ifnum = intf->cur_altsetting->desc.bInterfaceNumber;
61 	struct usb_device *usb = interface_to_usbdev(intf);
62 	unsigned int pipe;
63 
64 	if (len && !buf)
65 		return -EINVAL;
66 
67 	if (in) {
68 		pipe = usb_rcvctrlpipe(usb, 0);
69 		requesttype |= USB_DIR_IN;
70 	} else {
71 		pipe = usb_sndctrlpipe(usb, 0);
72 		requesttype |= USB_DIR_OUT;
73 	}
74 
75 	return usb_control_msg(usb, pipe, request, requesttype, value,
76 			       ifnum, buf, len, USB_CTRL_GET_TIMEOUT);
77 }
78 
79 static int gud_get_display_descriptor(struct usb_interface *intf,
80 				      struct gud_display_descriptor_req *desc)
81 {
82 	void *buf;
83 	int ret;
84 
85 	buf = kmalloc_obj(*desc);
86 	if (!buf)
87 		return -ENOMEM;
88 
89 	ret = gud_usb_control_msg(intf, true, GUD_REQ_GET_DESCRIPTOR, 0, buf, sizeof(*desc));
90 	memcpy(desc, buf, sizeof(*desc));
91 	kfree(buf);
92 	if (ret < 0)
93 		return ret;
94 	if (ret != sizeof(*desc))
95 		return -EIO;
96 
97 	if (desc->magic != le32_to_cpu(GUD_DISPLAY_MAGIC))
98 		return -ENODATA;
99 
100 	DRM_DEV_DEBUG_DRIVER(&intf->dev,
101 			     "version=%u flags=0x%x compression=0x%x max_buffer_size=%u\n",
102 			     desc->version, le32_to_cpu(desc->flags), desc->compression,
103 			     le32_to_cpu(desc->max_buffer_size));
104 
105 	if (!desc->version || !desc->max_width || !desc->max_height ||
106 	    le32_to_cpu(desc->min_width) > le32_to_cpu(desc->max_width) ||
107 	    le32_to_cpu(desc->min_height) > le32_to_cpu(desc->max_height))
108 		return -EINVAL;
109 
110 	return 0;
111 }
112 
113 static int gud_status_to_errno(u8 status)
114 {
115 	switch (status) {
116 	case GUD_STATUS_OK:
117 		return 0;
118 	case GUD_STATUS_BUSY:
119 		return -EBUSY;
120 	case GUD_STATUS_REQUEST_NOT_SUPPORTED:
121 		return -EOPNOTSUPP;
122 	case GUD_STATUS_PROTOCOL_ERROR:
123 		return -EPROTO;
124 	case GUD_STATUS_INVALID_PARAMETER:
125 		return -EINVAL;
126 	case GUD_STATUS_ERROR:
127 		return -EREMOTEIO;
128 	default:
129 		return -EREMOTEIO;
130 	}
131 }
132 
133 static int gud_usb_get_status(struct usb_interface *intf)
134 {
135 	int ret, status = -EIO;
136 	u8 *buf;
137 
138 	buf = kmalloc_obj(*buf);
139 	if (!buf)
140 		return -ENOMEM;
141 
142 	ret = gud_usb_control_msg(intf, true, GUD_REQ_GET_STATUS, 0, buf, sizeof(*buf));
143 	if (ret == sizeof(*buf))
144 		status = gud_status_to_errno(*buf);
145 	kfree(buf);
146 
147 	if (ret < 0)
148 		return ret;
149 
150 	return status;
151 }
152 
/*
 * Core control-transfer helper behind gud_usb_get()/gud_usb_set().
 * Serialised with ctrl_lock; bails out early once the device is unplugged.
 * Returns number of bytes transferred or a negative error code.
 */
static int gud_usb_transfer(struct gud_device *gdrm, bool in, u8 request, u16 index,
			    void *buf, size_t len)
{
	struct usb_interface *intf = to_usb_interface(gdrm->drm.dev);
	int idx, ret;

	drm_dbg(&gdrm->drm, "%s: request=0x%x index=%u len=%zu\n",
		in ? "get" : "set", request, index, len);

	/* Fails after drm_dev_unplug(); prevents touching a gone device */
	if (!drm_dev_enter(&gdrm->drm, &idx))
		return -ENODEV;

	mutex_lock(&gdrm->ctrl_lock);

	ret = gud_usb_control_msg(intf, in, request, index, buf, len);
	/*
	 * -EPIPE (endpoint stall) is how the device signals a request error,
	 * so fetch the real status. Devices with the STATUS_ON_SET flag also
	 * expect a status poll after every successful SET request.
	 */
	if (ret == -EPIPE || ((gdrm->flags & GUD_DISPLAY_FLAG_STATUS_ON_SET) && !in && ret >= 0)) {
		int status;

		status = gud_usb_get_status(intf);
		if (status < 0) {
			ret = status;
		} else if (ret < 0) {
			/* Transfer failed but device claims OK: keep it an error */
			dev_err_once(gdrm->drm.dev,
				     "Unexpected status OK for failed transfer\n");
			ret = -EPIPE;
		}
	}

	if (ret < 0) {
		drm_dbg(&gdrm->drm, "ret=%d\n", ret);
		gdrm->stats_num_errors++;	/* surfaced via the stats debugfs file */
	}

	mutex_unlock(&gdrm->ctrl_lock);
	drm_dev_exit(idx);

	return ret;
}
191 
/*
 * Receive up to @max_len bytes from the device for @request/@index.
 * @buf cannot be allocated on the stack (it is handed to the USB core for DMA).
 * Returns number of bytes received or negative error code on failure.
 */
int gud_usb_get(struct gud_device *gdrm, u8 request, u16 index, void *buf, size_t max_len)
{
	return gud_usb_transfer(gdrm, true, request, index, buf, max_len);
}
200 
201 /*
202  * @buf can be allocated on the stack or NULL.
203  * Returns zero on success or negative error code on failure.
204  */
205 int gud_usb_set(struct gud_device *gdrm, u8 request, u16 index, void *buf, size_t len)
206 {
207 	void *trbuf = NULL;
208 	int ret;
209 
210 	if (buf && len) {
211 		trbuf = kmemdup(buf, len, GFP_KERNEL);
212 		if (!trbuf)
213 			return -ENOMEM;
214 	}
215 
216 	ret = gud_usb_transfer(gdrm, false, request, index, trbuf, len);
217 	kfree(trbuf);
218 	if (ret < 0)
219 		return ret;
220 
221 	return ret != len ? -EIO : 0;
222 }
223 
224 /*
225  * @val can be allocated on the stack.
226  * Returns zero on success or negative error code on failure.
227  */
228 int gud_usb_get_u8(struct gud_device *gdrm, u8 request, u16 index, u8 *val)
229 {
230 	u8 *buf;
231 	int ret;
232 
233 	buf = kmalloc(sizeof(*val), GFP_KERNEL);
234 	if (!buf)
235 		return -ENOMEM;
236 
237 	ret = gud_usb_get(gdrm, request, index, buf, sizeof(*val));
238 	*val = *buf;
239 	kfree(buf);
240 	if (ret < 0)
241 		return ret;
242 
243 	return ret != sizeof(*val) ? -EIO : 0;
244 }
245 
/*
 * Write a single byte register on the device (index 0).
 * @val is copied by gud_usb_set() so it is safe on the stack.
 * Returns zero on success or negative error code on failure.
 */
int gud_usb_set_u8(struct gud_device *gdrm, u8 request, u8 val)
{
	return gud_usb_set(gdrm, request, 0, &val, sizeof(val));
}
251 
/*
 * Query the device for plane properties and attach the ones we understand
 * to the primary plane. Unknown properties are skipped so newer devices
 * keep working. Returns zero on success (including "no properties") or a
 * negative error code.
 */
static int gud_plane_add_properties(struct gud_device *gdrm)
{
	struct gud_property_req *properties;
	unsigned int i, num_properties;
	int ret;

	properties = kzalloc_objs(*properties, GUD_PROPERTIES_MAX_NUM,
				  GFP_KERNEL);
	if (!properties)
		return -ENOMEM;

	ret = gud_usb_get(gdrm, GUD_REQ_GET_PROPERTIES, 0,
			  properties, GUD_PROPERTIES_MAX_NUM * sizeof(*properties));
	/* ret == 0 means the device has no properties: success with nothing to do */
	if (ret <= 0)
		goto out;
	/* The reply must be a whole number of property records */
	if (ret % sizeof(*properties)) {
		ret = -EIO;
		goto out;
	}

	num_properties = ret / sizeof(*properties);
	ret = 0;

	/* drmm allocation: freed automatically when the drm device goes away */
	gdrm->properties = drmm_kcalloc(&gdrm->drm, num_properties, sizeof(*gdrm->properties),
					GFP_KERNEL);
	if (!gdrm->properties) {
		ret = -ENOMEM;
		goto out;
	}

	for (i = 0; i < num_properties; i++) {
		u16 prop = le16_to_cpu(properties[i].prop);
		u64 val = le64_to_cpu(properties[i].val);

		switch (prop) {
		case GUD_PROPERTY_ROTATION:
			/*
			 * DRM UAPI matches the protocol so use the value directly,
			 * but mask out any additions on future devices.
			 */
			val &= GUD_ROTATION_MASK;
			ret = drm_plane_create_rotation_property(&gdrm->plane,
								 DRM_MODE_ROTATE_0, val);
			break;
		default:
			/* New ones might show up in future devices, skip those we don't know. */
			drm_dbg(&gdrm->drm, "Ignoring unknown property: %u\n", prop);
			continue;
		}

		if (ret)
			goto out;

		/* Remember the props we attached; used when sending state to the device */
		gdrm->properties[gdrm->num_properties++] = prop;
	}
out:
	kfree(properties);

	return ret;
}
312 
313 static int gud_stats_debugfs(struct seq_file *m, void *data)
314 {
315 	struct drm_debugfs_entry *entry = m->private;
316 	struct gud_device *gdrm = to_gud_device(entry->dev);
317 	char buf[10];
318 
319 	string_get_size(gdrm->bulk_len, 1, STRING_UNITS_2, buf, sizeof(buf));
320 	seq_printf(m, "Max buffer size: %s\n", buf);
321 	seq_printf(m, "Number of errors:  %u\n", gdrm->stats_num_errors);
322 
323 	seq_puts(m, "Compression:      ");
324 	if (gdrm->compression & GUD_COMPRESSION_LZ4)
325 		seq_puts(m, " lz4");
326 	if (!gdrm->compression)
327 		seq_puts(m, " none");
328 	seq_puts(m, "\n");
329 
330 	if (gdrm->compression) {
331 		u64 remainder;
332 		u64 ratio = div64_u64_rem(gdrm->stats_length, gdrm->stats_actual_length,
333 					  &remainder);
334 		u64 ratio_frac = div64_u64(remainder * 10, gdrm->stats_actual_length);
335 
336 		seq_printf(m, "Compression ratio: %llu.%llu\n", ratio, ratio_frac);
337 	}
338 
339 	return 0;
340 }
341 
342 static const struct drm_crtc_helper_funcs gud_crtc_helper_funcs = {
343 	.atomic_check = drm_crtc_helper_atomic_check
344 };
345 
/* CRTC ops: everything delegated to the atomic helpers, no custom hooks */
static const struct drm_crtc_funcs gud_crtc_funcs = {
	.reset = drm_atomic_helper_crtc_reset,
	.destroy = drm_crtc_cleanup,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
};
354 
/* Plane helpers: shadow-plane helpers give vmap'ed framebuffer access
 * to the atomic_check/atomic_update implementations in this driver.
 */
static const struct drm_plane_helper_funcs gud_plane_helper_funcs = {
	DRM_GEM_SHADOW_PLANE_HELPER_FUNCS,
	.atomic_check = gud_plane_atomic_check,
	.atomic_update = gud_plane_atomic_update,
};
360 
/* Plane ops: atomic helpers plus shadow-plane state management */
static const struct drm_plane_funcs gud_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = drm_plane_cleanup,
	DRM_GEM_SHADOW_PLANE_FUNCS,
};
367 
/* Mode config: dirty-tracking framebuffers so damage clips reach the flush path */
static const struct drm_mode_config_funcs gud_mode_config_funcs = {
	.fb_create = drm_gem_fb_create_with_dirty,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = drm_atomic_helper_commit,
};
373 
/* Only linear buffers are supported; INVALID terminates the list */
static const u64 gud_plane_modifiers[] = {
	DRM_FORMAT_MOD_LINEAR,
	DRM_FORMAT_MOD_INVALID
};
378 
379 DEFINE_DRM_GEM_FOPS(gud_fops);
380 
/* DRM driver description: atomic modesetting on shmem-backed GEM objects */
static const struct drm_driver gud_drm_driver = {
	.driver_features	= DRIVER_MODESET | DRIVER_GEM | DRIVER_ATOMIC,
	.fops			= &gud_fops,
	DRM_GEM_SHMEM_DRIVER_OPS,
	DRM_FBDEV_SHMEM_DRIVER_OPS,

	.name			= "gud",
	.desc			= "Generic USB Display",
	.major			= 1,
	.minor			= 0,
};
392 
/*
 * Allocate the bulk transfer buffer (gdrm->bulk_len bytes) and build a
 * scatter-gather table over its pages so the USB core can DMA from it.
 * On failure the partially set up buffer is released by the
 * gud_free_buffers_and_mutex() devm action registered in gud_probe().
 */
static int gud_alloc_bulk_buffer(struct gud_device *gdrm)
{
	unsigned int i, num_pages;
	struct page **pages;
	void *ptr;
	int ret;

	/* vmalloc_32: large virtually-contiguous buffer from 32-bit addressable pages */
	gdrm->bulk_buf = vmalloc_32(gdrm->bulk_len);
	if (!gdrm->bulk_buf)
		return -ENOMEM;

	num_pages = DIV_ROUND_UP(gdrm->bulk_len, PAGE_SIZE);
	pages = kmalloc_objs(struct page *, num_pages);
	if (!pages)
		return -ENOMEM;

	/* Collect the backing page of every PAGE_SIZE chunk of the vmalloc area */
	for (i = 0, ptr = gdrm->bulk_buf; i < num_pages; i++, ptr += PAGE_SIZE)
		pages[i] = vmalloc_to_page(ptr);

	ret = sg_alloc_table_from_pages(&gdrm->bulk_sgt, pages, num_pages,
					0, gdrm->bulk_len, GFP_KERNEL);
	kfree(pages);	/* the sg table holds its own page references/layout */

	return ret;
}
418 
/*
 * devm action: release transfer buffers and the control-transfer mutex.
 * Runs on probe failure and on device teardown. The sg table is freed
 * before the vmalloc'ed buffer it describes.
 */
static void gud_free_buffers_and_mutex(void *data)
{
	struct gud_device *gdrm = data;

	vfree(gdrm->compress_buf);
	gdrm->compress_buf = NULL;
	sg_free_table(&gdrm->bulk_sgt);
	vfree(gdrm->bulk_buf);
	gdrm->bulk_buf = NULL;
	mutex_destroy(&gdrm->ctrl_lock);
}
430 
/*
 * Probe: verify the interface speaks the GUD protocol, then set up the whole
 * DRM pipeline (mode config, formats, plane, CRTC, connectors) from the
 * descriptor and format list the device reports, and finally register the
 * DRM device. All allocations are devm/drmm managed or released through the
 * gud_free_buffers_and_mutex() devm action.
 */
static int gud_probe(struct usb_interface *intf, const struct usb_device_id *id)
{
	const struct drm_format_info *xrgb8888_emulation_format = NULL;
	bool rgb565_supported = false, xrgb8888_supported = false;
	unsigned int num_formats_dev, num_formats = 0;
	struct usb_endpoint_descriptor *bulk_out;
	struct gud_display_descriptor_req desc;
	struct device *dev = &intf->dev;
	size_t max_buffer_size = 0;
	struct gud_device *gdrm;
	struct drm_device *drm;
	struct device *dma_dev;
	u8 *formats_dev;
	u32 *formats;
	int ret, i;

	/* Framebuffer data is pushed through a bulk OUT endpoint; it must exist */
	ret = usb_find_bulk_out_endpoint(intf->cur_altsetting, &bulk_out);
	if (ret)
		return ret;

	ret = gud_get_display_descriptor(intf, &desc);
	if (ret) {
		DRM_DEV_DEBUG_DRIVER(dev, "Not a display interface: ret=%d\n", ret);
		return -ENODEV;
	}

	/* Only protocol version 1 is implemented here */
	if (desc.version > 1) {
		dev_err(dev, "Protocol version %u is not supported\n", desc.version);
		return -ENODEV;
	}

	gdrm = devm_drm_dev_alloc(dev, &gud_drm_driver, struct gud_device, drm);
	if (IS_ERR(gdrm))
		return PTR_ERR(gdrm);

	drm = &gdrm->drm;

	gdrm->flags = le32_to_cpu(desc.flags);
	gdrm->compression = desc.compression & GUD_COMPRESSION_LZ4;

	/* Full-update devices always get the whole buffer; combining that
	 * with compression is not a supported flag combination.
	 */
	if (gdrm->flags & GUD_DISPLAY_FLAG_FULL_UPDATE && gdrm->compression)
		return -EINVAL;

	mutex_init(&gdrm->ctrl_lock);
	mutex_init(&gdrm->damage_lock);
	INIT_WORK(&gdrm->work, gud_flush_work);
	gud_clear_damage(gdrm);

	/* Ensure buffers/mutex are cleaned up on any later failure and on unbind */
	ret = devm_add_action(dev, gud_free_buffers_and_mutex, gdrm);
	if (ret)
		return ret;

	usb_set_intfdata(intf, gdrm);

	dma_dev = usb_intf_get_dma_device(intf);
	if (dma_dev) {
		drm_dev_set_dma_dev(drm, dma_dev);
		put_device(dma_dev);
	} else {
		dev_warn(dev, "buffer sharing not supported"); /* not an error */
	}

	/* Mode config init */
	ret = drmm_mode_config_init(drm);
	if (ret)
		return ret;

	/* Size limits come straight from the (already validated) descriptor */
	drm->mode_config.min_width = le32_to_cpu(desc.min_width);
	drm->mode_config.max_width = le32_to_cpu(desc.max_width);
	drm->mode_config.min_height = le32_to_cpu(desc.min_height);
	drm->mode_config.max_height = le32_to_cpu(desc.max_height);
	drm->mode_config.funcs = &gud_mode_config_funcs;

	/* Format init */
	formats_dev = devm_kmalloc(dev, GUD_FORMATS_MAX_NUM, GFP_KERNEL);
	/* Add room for emulated XRGB8888 */
	formats = devm_kmalloc_array(dev, GUD_FORMATS_MAX_NUM + 1, sizeof(*formats), GFP_KERNEL);
	if (!formats_dev || !formats)
		return -ENOMEM;

	ret = gud_usb_get(gdrm, GUD_REQ_GET_FORMATS, 0, formats_dev, GUD_FORMATS_MAX_NUM);
	if (ret < 0)
		return ret;

	/* One byte per format on the wire; translate each to a fourcc */
	num_formats_dev = ret;
	for (i = 0; i < num_formats_dev; i++) {
		const struct drm_format_info *info;
		size_t fmt_buf_size;
		u32 format;

		format = gud_to_fourcc(formats_dev[i]);
		if (!format) {
			drm_dbg(drm, "Unsupported format: 0x%02x\n", formats_dev[i]);
			continue;
		}

		/* R1/XRGB1111 have no core format info; use the local tables */
		if (format == GUD_DRM_FORMAT_R1)
			info = &gud_drm_format_r1;
		else if (format == GUD_DRM_FORMAT_XRGB1111)
			info = &gud_drm_format_xrgb1111;
		else
			info = drm_format_info(format);

		/*
		 * Track which formats can back an emulated XRGB8888 and
		 * whether native RGB565/XRGB8888 are available. The first
		 * matching format wins as the emulation target.
		 */
		switch (format) {
		case GUD_DRM_FORMAT_R1:
			fallthrough;
		case DRM_FORMAT_R8:
			fallthrough;
		case GUD_DRM_FORMAT_XRGB1111:
			fallthrough;
		case DRM_FORMAT_RGB332:
			fallthrough;
		case DRM_FORMAT_RGB888:
			if (!xrgb8888_emulation_format)
				xrgb8888_emulation_format = info;
			break;
		case DRM_FORMAT_RGB565:
			rgb565_supported = true;
			if (!xrgb8888_emulation_format)
				xrgb8888_emulation_format = info;
			break;
		case DRM_FORMAT_XRGB8888:
			xrgb8888_supported = true;
			break;
		}

		/* Worst-case transfer buffer needed for a full frame in this format */
		fmt_buf_size = drm_format_info_min_pitch(info, 0, drm->mode_config.max_width) *
			       drm->mode_config.max_height;
		max_buffer_size = max(max_buffer_size, fmt_buf_size);

		if (format == GUD_DRM_FORMAT_R1 || format == GUD_DRM_FORMAT_XRGB1111)
			continue; /* Internal not for userspace */

		formats[num_formats++] = format;
	}

	if (!num_formats && !xrgb8888_emulation_format) {
		dev_err(dev, "No supported pixel formats found\n");
		return -EINVAL;
	}

	/* Prefer speed over color depth */
	if (rgb565_supported)
		drm->mode_config.preferred_depth = 16;

	/* Advertise XRGB8888 to userspace even if the device lacks it natively */
	if (!xrgb8888_supported && xrgb8888_emulation_format) {
		gdrm->xrgb8888_emulation_format = xrgb8888_emulation_format;
		formats[num_formats++] = DRM_FORMAT_XRGB8888;
	}

	/* The device's own limit takes precedence over the computed worst case */
	if (desc.max_buffer_size)
		max_buffer_size = le32_to_cpu(desc.max_buffer_size);
	/* Prevent a misbehaving device from allocating loads of RAM. 4096x4096@XRGB8888 = 64 MB */
	if (max_buffer_size > SZ_64M)
		max_buffer_size = SZ_64M;

	gdrm->bulk_pipe = usb_sndbulkpipe(interface_to_usbdev(intf), usb_endpoint_num(bulk_out));
	gdrm->bulk_len = max_buffer_size;

	ret = gud_alloc_bulk_buffer(gdrm);
	if (ret)
		return ret;

	if (gdrm->compression & GUD_COMPRESSION_LZ4) {
		gdrm->lz4_comp_mem = devm_kmalloc(dev, LZ4_MEM_COMPRESS, GFP_KERNEL);
		if (!gdrm->lz4_comp_mem)
			return -ENOMEM;

		/* Scratch buffer holding the LZ4-compressed frame before transfer */
		gdrm->compress_buf = vmalloc(gdrm->bulk_len);
		if (!gdrm->compress_buf)
			return -ENOMEM;
	}

	/* Pipeline init */
	ret = drm_universal_plane_init(drm, &gdrm->plane, 0,
				       &gud_plane_funcs,
				       formats, num_formats,
				       gud_plane_modifiers,
				       DRM_PLANE_TYPE_PRIMARY, NULL);
	if (ret)
		return ret;

	drm_plane_helper_add(&gdrm->plane, &gud_plane_helper_funcs);
	drm_plane_enable_fb_damage_clips(&gdrm->plane);

	ret = gud_plane_add_properties(gdrm);
	if (ret) {
		dev_err(dev, "Failed to add properties (error=%d)\n", ret);
		return ret;
	}

	ret = drm_crtc_init_with_planes(drm, &gdrm->crtc, &gdrm->plane, NULL,
					&gud_crtc_funcs, NULL);
	if (ret)
		return ret;

	drm_crtc_helper_add(&gdrm->crtc, &gud_crtc_helper_funcs);

	ret = gud_get_connectors(gdrm);
	if (ret) {
		dev_err(dev, "Failed to get connectors (error=%d)\n", ret);
		return ret;
	}

	drm_mode_config_reset(drm);
	drm_kms_helper_poll_init(drm);

	drm_debugfs_add_file(drm, "stats", gud_stats_debugfs, NULL);

	ret = drm_dev_register(drm, 0);
	if (ret)
		return ret;

	/* The format arrays were only needed during init; release them early */
	devm_kfree(dev, formats);
	devm_kfree(dev, formats_dev);

	drm_client_setup(drm, NULL);

	return 0;
}
651 
/*
 * USB disconnect: stop connector polling, mark the device unplugged so
 * gud_usb_transfer() refuses new I/O, then shut down the display pipeline.
 * Memory is released later via devm/drmm when the last reference drops.
 */
static void gud_disconnect(struct usb_interface *interface)
{
	struct gud_device *gdrm = usb_get_intfdata(interface);
	struct drm_device *drm = &gdrm->drm;

	drm_kms_helper_poll_fini(drm);
	drm_dev_unplug(drm);
	drm_atomic_helper_shutdown(drm);
}
661 
/* USB suspend: save atomic state and disable the pipeline (@message unused) */
static int gud_suspend(struct usb_interface *intf, pm_message_t message)
{
	struct gud_device *gdrm = usb_get_intfdata(intf);

	return drm_mode_config_helper_suspend(&gdrm->drm);
}
668 
/*
 * USB resume (also used for reset_resume): restore the saved atomic state.
 * The helper's return value is deliberately ignored; resume itself succeeds
 * regardless of whether the restored commit worked.
 */
static int gud_resume(struct usb_interface *intf)
{
	struct gud_device *gdrm = usb_get_intfdata(intf);

	drm_mode_config_helper_resume(&gdrm->drm);

	return 0;
}
677 
/*
 * Match known GUD device IDs, but only on their vendor-specific interface
 * class so composite devices can expose other functions too.
 */
static const struct usb_device_id gud_id_table[] = {
	{ USB_DEVICE_INTERFACE_CLASS(0x1d50, 0x614d, USB_CLASS_VENDOR_SPEC) },
	{ USB_DEVICE_INTERFACE_CLASS(0x16d0, 0x10a9, USB_CLASS_VENDOR_SPEC) },
	{ }
};

MODULE_DEVICE_TABLE(usb, gud_id_table);
685 
/* USB driver glue; a post-reset resume is handled like a normal resume */
static struct usb_driver gud_usb_driver = {
	.name		= "gud",
	.probe		= gud_probe,
	.disconnect	= gud_disconnect,
	.id_table	= gud_id_table,
	.suspend	= gud_suspend,
	.resume		= gud_resume,
	.reset_resume	= gud_resume,
};
695 
/* Standard module registration boilerplate */
module_usb_driver(gud_usb_driver);

MODULE_AUTHOR("Noralf Trønnes");
MODULE_DESCRIPTION("GUD USB Display driver");
MODULE_LICENSE("Dual MIT/GPL");
701