// SPDX-License-Identifier: MIT
/*
 * Copyright 2020 Noralf Trønnes
 */

#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/lz4.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/string_helpers.h>
#include <linux/usb.h>
#include <linux/vmalloc.h>
#include <linux/workqueue.h>

#include <drm/clients/drm_client_setup.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_blend.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_damage_helper.h>
#include <drm/drm_debugfs.h>
#include <drm/drm_drv.h>
#include <drm/drm_fbdev_shmem.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_gem_atomic_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_gem_shmem_helper.h>
#include <drm/drm_managed.h>
#include <drm/drm_print.h>
#include <drm/drm_probe_helper.h>
#include <drm/gud.h>

#include "gud_internal.h"

/* Only used internally */
static const struct drm_format_info gud_drm_format_r1 = {
        .format = GUD_DRM_FORMAT_R1,
        .num_planes = 1,
        .char_per_block = { 1, 0, 0 },
        .block_w = { 8, 0, 0 },
        .block_h = { 1, 0, 0 },
        .hsub = 1,
        .vsub = 1,
};

static const struct drm_format_info gud_drm_format_xrgb1111 = {
        .format = GUD_DRM_FORMAT_XRGB1111,
        .num_planes = 1,
        .char_per_block = { 1, 0, 0 },
        .block_w = { 2, 0, 0 },
        .block_h = { 1, 0, 0 },
        .hsub = 1,
        .vsub = 1,
};

static int gud_usb_control_msg(struct usb_interface *intf, bool in,
                               u8 request, u16 value, void *buf, size_t len)
{
        u8 requesttype = USB_TYPE_VENDOR | USB_RECIP_INTERFACE;
        u8 ifnum = intf->cur_altsetting->desc.bInterfaceNumber;
        struct usb_device *usb = interface_to_usbdev(intf);
        unsigned int pipe;

        if (len && !buf)
                return -EINVAL;

        if (in) {
                pipe = usb_rcvctrlpipe(usb, 0);
                requesttype |= USB_DIR_IN;
        } else {
                pipe = usb_sndctrlpipe(usb, 0);
                requesttype |= USB_DIR_OUT;
        }

        return usb_control_msg(usb, pipe, request, requesttype, value,
                               ifnum, buf, len, USB_CTRL_GET_TIMEOUT);
}

static int gud_get_display_descriptor(struct usb_interface *intf,
                                      struct gud_display_descriptor_req *desc)
{
        void *buf;
        int ret;

        buf = kmalloc(sizeof(*desc), GFP_KERNEL);
        if (!buf)
                return -ENOMEM;

        ret = gud_usb_control_msg(intf, true, GUD_REQ_GET_DESCRIPTOR, 0, buf, sizeof(*desc));
        memcpy(desc, buf, sizeof(*desc));
        kfree(buf);
        if (ret < 0)
                return ret;
        if (ret != sizeof(*desc))
                return -EIO;

        if (le32_to_cpu(desc->magic) != GUD_DISPLAY_MAGIC)
                return -ENODATA;

        DRM_DEV_DEBUG_DRIVER(&intf->dev,
                             "version=%u flags=0x%x compression=0x%x max_buffer_size=%u\n",
                             desc->version, le32_to_cpu(desc->flags), desc->compression,
                             le32_to_cpu(desc->max_buffer_size));

        if (!desc->version || !desc->max_width || !desc->max_height ||
            le32_to_cpu(desc->min_width) > le32_to_cpu(desc->max_width) ||
            le32_to_cpu(desc->min_height) > le32_to_cpu(desc->max_height))
                return -EINVAL;

        return 0;
}

static int gud_status_to_errno(u8 status)
{
        switch (status) {
        case GUD_STATUS_OK:
                return 0;
        case GUD_STATUS_BUSY:
                return -EBUSY;
        case GUD_STATUS_REQUEST_NOT_SUPPORTED:
                return -EOPNOTSUPP;
        case GUD_STATUS_PROTOCOL_ERROR:
                return -EPROTO;
        case GUD_STATUS_INVALID_PARAMETER:
                return -EINVAL;
        case GUD_STATUS_ERROR:
                return -EREMOTEIO;
        default:
                return -EREMOTEIO;
        }
}
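
/*
 * Poll the one-byte request status (GUD_REQ_GET_STATUS) and map it to an
 * errno. Used when a control transfer stalls, and after every set request
 * if the device has set GUD_DISPLAY_FLAG_STATUS_ON_SET.
 */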
static int gud_usb_get_status(struct usb_interface *intf)
{
        int ret, status = -EIO;
        u8 *buf;

        buf = kmalloc(sizeof(*buf), GFP_KERNEL);
        if (!buf)
                return -ENOMEM;

        ret = gud_usb_control_msg(intf, true, GUD_REQ_GET_STATUS, 0, buf, sizeof(*buf));
        if (ret == sizeof(*buf))
                status = gud_status_to_errno(*buf);
        kfree(buf);

        if (ret < 0)
                return ret;

        return status;
}

static int gud_usb_transfer(struct gud_device *gdrm, bool in, u8 request, u16 index,
                            void *buf, size_t len)
{
        struct usb_interface *intf = to_usb_interface(gdrm->drm.dev);
        int idx, ret;

        drm_dbg(&gdrm->drm, "%s: request=0x%x index=%u len=%zu\n",
                in ? "get" : "set", request, index, len);

        if (!drm_dev_enter(&gdrm->drm, &idx))
                return -ENODEV;

        mutex_lock(&gdrm->ctrl_lock);

        ret = gud_usb_control_msg(intf, in, request, index, buf, len);
        if (ret == -EPIPE || ((gdrm->flags & GUD_DISPLAY_FLAG_STATUS_ON_SET) && !in && ret >= 0)) {
                int status;

                status = gud_usb_get_status(intf);
                if (status < 0) {
                        ret = status;
                } else if (ret < 0) {
                        dev_err_once(gdrm->drm.dev,
                                     "Unexpected status OK for failed transfer\n");
                        ret = -EPIPE;
                }
        }

        if (ret < 0) {
                drm_dbg(&gdrm->drm, "ret=%d\n", ret);
                gdrm->stats_num_errors++;
        }

        mutex_unlock(&gdrm->ctrl_lock);
        drm_dev_exit(idx);

        return ret;
}

/*
 * @buf cannot be allocated on the stack.
 * Returns number of bytes received or negative error code on failure.
 */
int gud_usb_get(struct gud_device *gdrm, u8 request, u16 index, void *buf, size_t max_len)
{
        return gud_usb_transfer(gdrm, true, request, index, buf, max_len);
}

/*
 * @buf can be allocated on the stack or NULL.
 * Returns zero on success or negative error code on failure.
 */
int gud_usb_set(struct gud_device *gdrm, u8 request, u16 index, void *buf, size_t len)
{
        void *trbuf = NULL;
        int ret;

        if (buf && len) {
                trbuf = kmemdup(buf, len, GFP_KERNEL);
                if (!trbuf)
                        return -ENOMEM;
        }

        ret = gud_usb_transfer(gdrm, false, request, index, trbuf, len);
        kfree(trbuf);
        if (ret < 0)
                return ret;

        return ret != len ? -EIO : 0;
}

/*
 * @val can be allocated on the stack.
 * Returns zero on success or negative error code on failure.
 */
int gud_usb_get_u8(struct gud_device *gdrm, u8 request, u16 index, u8 *val)
{
        u8 *buf;
        int ret;

        buf = kmalloc(sizeof(*val), GFP_KERNEL);
        if (!buf)
                return -ENOMEM;

        ret = gud_usb_get(gdrm, request, index, buf, sizeof(*val));
        *val = *buf;
        kfree(buf);
        if (ret < 0)
                return ret;

        return ret != sizeof(*val) ? -EIO : 0;
}

/*
 * Returns zero on success or negative error code on failure.
 */
int gud_usb_set_u8(struct gud_device *gdrm, u8 request, u8 val)
{
        return gud_usb_set(gdrm, request, 0, &val, sizeof(val));
}
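
/*
 * Fetch the device properties (GUD_REQ_GET_PROPERTIES) and register the
 * matching DRM plane properties. Only GUD_PROPERTY_ROTATION is handled here;
 * unknown properties are skipped so newer devices keep working.
 */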
static int gud_get_properties(struct gud_device *gdrm)
{
        struct gud_property_req *properties;
        unsigned int i, num_properties;
        int ret;

        properties = kcalloc(GUD_PROPERTIES_MAX_NUM, sizeof(*properties), GFP_KERNEL);
        if (!properties)
                return -ENOMEM;

        ret = gud_usb_get(gdrm, GUD_REQ_GET_PROPERTIES, 0,
                          properties, GUD_PROPERTIES_MAX_NUM * sizeof(*properties));
        if (ret <= 0)
                goto out;
        if (ret % sizeof(*properties)) {
                ret = -EIO;
                goto out;
        }

        num_properties = ret / sizeof(*properties);
        ret = 0;

        gdrm->properties = drmm_kcalloc(&gdrm->drm, num_properties, sizeof(*gdrm->properties),
                                        GFP_KERNEL);
        if (!gdrm->properties) {
                ret = -ENOMEM;
                goto out;
        }

        for (i = 0; i < num_properties; i++) {
                u16 prop = le16_to_cpu(properties[i].prop);
                u64 val = le64_to_cpu(properties[i].val);

                switch (prop) {
                case GUD_PROPERTY_ROTATION:
                        /*
                         * DRM UAPI matches the protocol so use the value directly,
                         * but mask out any additions on future devices.
                         */
                        val &= GUD_ROTATION_MASK;
                        ret = drm_plane_create_rotation_property(&gdrm->plane,
                                                                 DRM_MODE_ROTATE_0, val);
                        break;
                default:
                        /* New ones might show up in future devices, skip those we don't know. */
                        drm_dbg(&gdrm->drm, "Ignoring unknown property: %u\n", prop);
                        continue;
                }

                if (ret)
                        goto out;

                gdrm->properties[gdrm->num_properties++] = prop;
        }
out:
        kfree(properties);

        return ret;
}

static int gud_stats_debugfs(struct seq_file *m, void *data)
{
        struct drm_debugfs_entry *entry = m->private;
        struct gud_device *gdrm = to_gud_device(entry->dev);
        char buf[10];

        string_get_size(gdrm->bulk_len, 1, STRING_UNITS_2, buf, sizeof(buf));
        seq_printf(m, "Max buffer size: %s\n", buf);
        seq_printf(m, "Number of errors: %u\n", gdrm->stats_num_errors);

        seq_puts(m, "Compression:");
        if (gdrm->compression & GUD_COMPRESSION_LZ4)
                seq_puts(m, " lz4");
        if (!gdrm->compression)
                seq_puts(m, " none");
        seq_puts(m, "\n");

        if (gdrm->compression) {
                u64 remainder;
                u64 ratio = div64_u64_rem(gdrm->stats_length, gdrm->stats_actual_length,
                                          &remainder);
                u64 ratio_frac = div64_u64(remainder * 10, gdrm->stats_actual_length);

                seq_printf(m, "Compression ratio: %llu.%llu\n", ratio, ratio_frac);
        }

        return 0;
}

static const struct drm_crtc_helper_funcs gud_crtc_helper_funcs = {
        .atomic_check = drm_crtc_helper_atomic_check,
};

static const struct drm_crtc_funcs gud_crtc_funcs = {
        .reset = drm_atomic_helper_crtc_reset,
        .destroy = drm_crtc_cleanup,
        .set_config = drm_atomic_helper_set_config,
        .page_flip = drm_atomic_helper_page_flip,
        .atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
        .atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
};

static const struct drm_plane_helper_funcs gud_plane_helper_funcs = {
        DRM_GEM_SHADOW_PLANE_HELPER_FUNCS,
        .atomic_check = gud_plane_atomic_check,
        .atomic_update = gud_plane_atomic_update,
};

static const struct drm_plane_funcs gud_plane_funcs = {
        .update_plane = drm_atomic_helper_update_plane,
        .disable_plane = drm_atomic_helper_disable_plane,
        .destroy = drm_plane_cleanup,
        DRM_GEM_SHADOW_PLANE_FUNCS,
};
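
/*
 * Mode setting uses the plain atomic helpers. Framebuffers get a dirtyfb
 * callback and the primary plane enables damage clips (see gud_probe()), so
 * flushing can be limited to the damaged part of the framebuffer.
 */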
static const struct drm_mode_config_funcs gud_mode_config_funcs = {
        .fb_create = drm_gem_fb_create_with_dirty,
        .atomic_check = drm_atomic_helper_check,
        .atomic_commit = drm_atomic_helper_commit,
};

static const u64 gud_plane_modifiers[] = {
        DRM_FORMAT_MOD_LINEAR,
        DRM_FORMAT_MOD_INVALID
};

DEFINE_DRM_GEM_FOPS(gud_fops);

static const struct drm_driver gud_drm_driver = {
        .driver_features = DRIVER_MODESET | DRIVER_GEM | DRIVER_ATOMIC,
        .fops = &gud_fops,
        DRM_GEM_SHMEM_DRIVER_OPS,
        DRM_FBDEV_SHMEM_DRIVER_OPS,

        .name = "gud",
        .desc = "Generic USB Display",
        .major = 1,
        .minor = 0,
};

static int gud_alloc_bulk_buffer(struct gud_device *gdrm)
{
        unsigned int i, num_pages;
        struct page **pages;
        void *ptr;
        int ret;

        gdrm->bulk_buf = vmalloc_32(gdrm->bulk_len);
        if (!gdrm->bulk_buf)
                return -ENOMEM;

        num_pages = DIV_ROUND_UP(gdrm->bulk_len, PAGE_SIZE);
        pages = kmalloc_array(num_pages, sizeof(struct page *), GFP_KERNEL);
        if (!pages)
                return -ENOMEM;

        for (i = 0, ptr = gdrm->bulk_buf; i < num_pages; i++, ptr += PAGE_SIZE)
                pages[i] = vmalloc_to_page(ptr);

        ret = sg_alloc_table_from_pages(&gdrm->bulk_sgt, pages, num_pages,
                                        0, gdrm->bulk_len, GFP_KERNEL);
        kfree(pages);

        return ret;
}

static void gud_free_buffers_and_mutex(void *data)
{
        struct gud_device *gdrm = data;

        vfree(gdrm->compress_buf);
        gdrm->compress_buf = NULL;
        sg_free_table(&gdrm->bulk_sgt);
        vfree(gdrm->bulk_buf);
        gdrm->bulk_buf = NULL;
        mutex_destroy(&gdrm->ctrl_lock);
}

static int gud_probe(struct usb_interface *intf, const struct usb_device_id *id)
{
        const struct drm_format_info *xrgb8888_emulation_format = NULL;
        bool rgb565_supported = false, xrgb8888_supported = false;
        unsigned int num_formats_dev, num_formats = 0;
        struct usb_endpoint_descriptor *bulk_out;
        struct gud_display_descriptor_req desc;
        struct device *dev = &intf->dev;
        size_t max_buffer_size = 0;
        struct gud_device *gdrm;
        struct drm_device *drm;
        struct device *dma_dev;
        u8 *formats_dev;
        u32 *formats;
        int ret, i;

        ret = usb_find_bulk_out_endpoint(intf->cur_altsetting, &bulk_out);
        if (ret)
                return ret;

        ret = gud_get_display_descriptor(intf, &desc);
        if (ret) {
                DRM_DEV_DEBUG_DRIVER(dev, "Not a display interface: ret=%d\n", ret);
                return -ENODEV;
        }

        if (desc.version > 1) {
                dev_err(dev, "Protocol version %u is not supported\n", desc.version);
                return -ENODEV;
        }

        gdrm = devm_drm_dev_alloc(dev, &gud_drm_driver, struct gud_device, drm);
        if (IS_ERR(gdrm))
                return PTR_ERR(gdrm);

        drm = &gdrm->drm;
        drm->mode_config.funcs = &gud_mode_config_funcs;
        ret = drmm_mode_config_init(drm);
        if (ret)
                return ret;

        gdrm->flags = le32_to_cpu(desc.flags);
        gdrm->compression = desc.compression & GUD_COMPRESSION_LZ4;

        if (gdrm->flags & GUD_DISPLAY_FLAG_FULL_UPDATE && gdrm->compression)
                return -EINVAL;

        mutex_init(&gdrm->ctrl_lock);
        mutex_init(&gdrm->damage_lock);
        INIT_WORK(&gdrm->work, gud_flush_work);
        gud_clear_damage(gdrm);

        ret = devm_add_action(dev, gud_free_buffers_and_mutex, gdrm);
        if (ret)
                return ret;

        drm->mode_config.min_width = le32_to_cpu(desc.min_width);
        drm->mode_config.max_width = le32_to_cpu(desc.max_width);
        drm->mode_config.min_height = le32_to_cpu(desc.min_height);
        drm->mode_config.max_height = le32_to_cpu(desc.max_height);
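
        /*
         * GUD_REQ_GET_FORMATS returns one byte per supported pixel format.
         * R1 and XRGB1111 are only used internally for format conversion and
         * are not exposed to userspace. If the device doesn't support
         * XRGB8888 it is emulated on top of the "best" native format so
         * generic userspace always finds a format it can use.
         */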
        formats_dev = devm_kmalloc(dev, GUD_FORMATS_MAX_NUM, GFP_KERNEL);
        /* Add room for emulated XRGB8888 */
        formats = devm_kmalloc_array(dev, GUD_FORMATS_MAX_NUM + 1, sizeof(*formats), GFP_KERNEL);
        if (!formats_dev || !formats)
                return -ENOMEM;

        ret = gud_usb_get(gdrm, GUD_REQ_GET_FORMATS, 0, formats_dev, GUD_FORMATS_MAX_NUM);
        if (ret < 0)
                return ret;

        num_formats_dev = ret;
        for (i = 0; i < num_formats_dev; i++) {
                const struct drm_format_info *info;
                size_t fmt_buf_size;
                u32 format;

                format = gud_to_fourcc(formats_dev[i]);
                if (!format) {
                        drm_dbg(drm, "Unsupported format: 0x%02x\n", formats_dev[i]);
                        continue;
                }

                if (format == GUD_DRM_FORMAT_R1)
                        info = &gud_drm_format_r1;
                else if (format == GUD_DRM_FORMAT_XRGB1111)
                        info = &gud_drm_format_xrgb1111;
                else
                        info = drm_format_info(format);

                switch (format) {
                case GUD_DRM_FORMAT_R1:
                        fallthrough;
                case DRM_FORMAT_R8:
                        fallthrough;
                case GUD_DRM_FORMAT_XRGB1111:
                        fallthrough;
                case DRM_FORMAT_RGB332:
                        fallthrough;
                case DRM_FORMAT_RGB888:
                        if (!xrgb8888_emulation_format)
                                xrgb8888_emulation_format = info;
                        break;
                case DRM_FORMAT_RGB565:
                        rgb565_supported = true;
                        if (!xrgb8888_emulation_format)
                                xrgb8888_emulation_format = info;
                        break;
                case DRM_FORMAT_XRGB8888:
                        xrgb8888_supported = true;
                        break;
                }

                fmt_buf_size = drm_format_info_min_pitch(info, 0, drm->mode_config.max_width) *
                               drm->mode_config.max_height;
                max_buffer_size = max(max_buffer_size, fmt_buf_size);

                if (format == GUD_DRM_FORMAT_R1 || format == GUD_DRM_FORMAT_XRGB1111)
                        continue; /* Internal not for userspace */

                formats[num_formats++] = format;
        }

        if (!num_formats && !xrgb8888_emulation_format) {
                dev_err(dev, "No supported pixel formats found\n");
                return -EINVAL;
        }

        /* Prefer speed over color depth */
        if (rgb565_supported)
                drm->mode_config.preferred_depth = 16;

        if (!xrgb8888_supported && xrgb8888_emulation_format) {
                gdrm->xrgb8888_emulation_format = xrgb8888_emulation_format;
                formats[num_formats++] = DRM_FORMAT_XRGB8888;
        }

        if (desc.max_buffer_size)
                max_buffer_size = le32_to_cpu(desc.max_buffer_size);
        /* Prevent a misbehaving device from allocating loads of RAM. 4096x4096@XRGB8888 = 64 MB */
        if (max_buffer_size > SZ_64M)
                max_buffer_size = SZ_64M;
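
        /*
         * The bulk buffer is a single vmalloc_32() allocation backed by a
         * scatter-gather table (gud_alloc_bulk_buffer()) so a flush can be
         * submitted as one bulk transfer. With LZ4 enabled a staging buffer
         * of the same size holds the compressed copy.
         */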
        gdrm->bulk_pipe = usb_sndbulkpipe(interface_to_usbdev(intf), usb_endpoint_num(bulk_out));
        gdrm->bulk_len = max_buffer_size;

        ret = gud_alloc_bulk_buffer(gdrm);
        if (ret)
                return ret;

        if (gdrm->compression & GUD_COMPRESSION_LZ4) {
                gdrm->lz4_comp_mem = devm_kmalloc(dev, LZ4_MEM_COMPRESS, GFP_KERNEL);
                if (!gdrm->lz4_comp_mem)
                        return -ENOMEM;

                gdrm->compress_buf = vmalloc(gdrm->bulk_len);
                if (!gdrm->compress_buf)
                        return -ENOMEM;
        }

        ret = drm_universal_plane_init(drm, &gdrm->plane, 0,
                                       &gud_plane_funcs,
                                       formats, num_formats,
                                       gud_plane_modifiers,
                                       DRM_PLANE_TYPE_PRIMARY, NULL);
        if (ret)
                return ret;

        drm_plane_helper_add(&gdrm->plane, &gud_plane_helper_funcs);
        drm_plane_enable_fb_damage_clips(&gdrm->plane);

        devm_kfree(dev, formats);
        devm_kfree(dev, formats_dev);

        ret = gud_get_properties(gdrm);
        if (ret) {
                dev_err(dev, "Failed to get properties (error=%d)\n", ret);
                return ret;
        }

        ret = drm_crtc_init_with_planes(drm, &gdrm->crtc, &gdrm->plane, NULL,
                                        &gud_crtc_funcs, NULL);
        if (ret)
                return ret;

        drm_crtc_helper_add(&gdrm->crtc, &gud_crtc_helper_funcs);

        ret = gud_get_connectors(gdrm);
        if (ret) {
                dev_err(dev, "Failed to get connectors (error=%d)\n", ret);
                return ret;
        }

        drm_mode_config_reset(drm);

        usb_set_intfdata(intf, gdrm);

        dma_dev = usb_intf_get_dma_device(intf);
        if (dma_dev) {
                drm_dev_set_dma_dev(drm, dma_dev);
                put_device(dma_dev);
        } else {
                dev_warn(dev, "buffer sharing not supported"); /* not an error */
        }

        drm_debugfs_add_file(drm, "stats", gud_stats_debugfs, NULL);

        ret = drm_dev_register(drm, 0);
        if (ret)
                return ret;

        drm_kms_helper_poll_init(drm);

        drm_client_setup(drm, NULL);

        return 0;
}

static void gud_disconnect(struct usb_interface *interface)
{
        struct gud_device *gdrm = usb_get_intfdata(interface);
        struct drm_device *drm = &gdrm->drm;

        drm_kms_helper_poll_fini(drm);
        drm_dev_unplug(drm);
        drm_atomic_helper_shutdown(drm);
}

static int gud_suspend(struct usb_interface *intf, pm_message_t message)
{
        struct gud_device *gdrm = usb_get_intfdata(intf);

        return drm_mode_config_helper_suspend(&gdrm->drm);
}

static int gud_resume(struct usb_interface *intf)
{
        struct gud_device *gdrm = usb_get_intfdata(intf);

        drm_mode_config_helper_resume(&gdrm->drm);

        return 0;
}

static const struct usb_device_id gud_id_table[] = {
        { USB_DEVICE_INTERFACE_CLASS(0x1d50, 0x614d, USB_CLASS_VENDOR_SPEC) },
        { USB_DEVICE_INTERFACE_CLASS(0x16d0, 0x10a9, USB_CLASS_VENDOR_SPEC) },
        { }
};

MODULE_DEVICE_TABLE(usb, gud_id_table);

static struct usb_driver gud_usb_driver = {
        .name = "gud",
        .probe = gud_probe,
        .disconnect = gud_disconnect,
        .id_table = gud_id_table,
        .suspend = gud_suspend,
        .resume = gud_resume,
        .reset_resume = gud_resume,
};

module_usb_driver(gud_usb_driver);

MODULE_AUTHOR("Noralf Trønnes");
MODULE_DESCRIPTION("GUD USB Display driver");
MODULE_LICENSE("Dual MIT/GPL");