// SPDX-License-Identifier: (GPL-2.0-only OR BSD-2-Clause)

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_client_setup.h>
#include <drm/drm_connector.h>
#include <drm/drm_damage_helper.h>
#include <drm/drm_drv.h>
#include <drm/drm_fb_dma_helper.h>
#include <drm/drm_fbdev_dma.h>
#include <drm/drm_format_helper.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_atomic_helper.h>
#include <drm/drm_gem_dma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_managed.h>
#include <drm/drm_modes.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_rect.h>
#include <linux/bitrev.h>
#include <linux/delay.h>
#include <linux/gpio/consumer.h>
#include <linux/kthread.h>
#include <linux/mod_devicetable.h>
#include <linux/module.h>
#include <linux/mutex.h>
#include <linux/pwm.h>
#include <linux/spi/spi.h>

#define SHARP_MODE_PERIOD 8
#define SHARP_ADDR_PERIOD 8
#define SHARP_DUMMY_PERIOD 8

#define SHARP_MEMORY_DISPLAY_MAINTAIN_MODE 0
#define SHARP_MEMORY_DISPLAY_UPDATE_MODE 1
#define SHARP_MEMORY_DISPLAY_CLEAR_MODE 4

enum sharp_memory_model {
	LS010B7DH04,
	LS011B7DH03,
	LS012B7DD01,
	LS013B7DH03,
	LS013B7DH05,
	LS018B7DH02,
	LS027B7DH01,
	LS027B7DH01A,
	LS032B7DD02,
	LS044Q7DH01,
};

enum sharp_memory_vcom_mode {
	SHARP_MEMORY_SOFTWARE_VCOM,
	SHARP_MEMORY_EXTERNAL_VCOM,
	SHARP_MEMORY_PWM_VCOM
};

struct sharp_memory_device {
	struct drm_device drm;
	struct spi_device *spi;

	const struct drm_display_mode *mode;

	struct drm_crtc crtc;
	struct drm_plane plane;
	struct drm_encoder encoder;
	struct drm_connector connector;

	struct gpio_desc *enable_gpio;

	struct task_struct *sw_vcom_signal;
	struct pwm_device *pwm_vcom_signal;

	enum sharp_memory_vcom_mode vcom_mode;
	u8 vcom;

	u32 pitch;
	u32 tx_buffer_size;
	u8 *tx_buffer;

	/*
	 * When vcom_mode == "software" a kthread is used to periodically send a
	 * 'maintain display' message over spi. This mutex ensures tx_buffer access
	 * and spi bus usage are synchronized in this case.
	 */
	struct mutex tx_mutex;
};

static inline int sharp_memory_spi_write(struct spi_device *spi, void *buf, size_t len)
{
	/* Reverse the bit order */
	for (u8 *b = buf; b < ((u8 *)buf) + len; ++b)
		*b = bitrev8(*b);

	return spi_write(spi, buf, len);
}

static inline struct sharp_memory_device *drm_to_sharp_memory_device(struct drm_device *drm)
{
	return container_of(drm, struct sharp_memory_device, drm);
}

DEFINE_DRM_GEM_DMA_FOPS(sharp_memory_fops);

static const struct drm_driver sharp_memory_drm_driver = {
	.driver_features = DRIVER_GEM | DRIVER_MODESET | DRIVER_ATOMIC,
	.fops = &sharp_memory_fops,
	DRM_GEM_DMA_DRIVER_OPS_VMAP,
	DRM_FBDEV_DMA_DRIVER_OPS,
	.name = "sharp_memory_display",
	.desc = "Sharp Display Memory LCD",
	.date = "20231129",
	.major = 1,
	.minor = 0,
};

static inline void sharp_memory_set_tx_buffer_mode(u8 *buffer, u8 mode, u8 vcom)
{
	*buffer = mode | (vcom << 1);
}

static inline void sharp_memory_set_tx_buffer_addresses(u8 *buffer,
							struct drm_rect clip,
							u32 pitch)
{
	for (u32 line = 0; line < clip.y2; ++line)
		buffer[line * pitch] = line + 1;
}

static void sharp_memory_set_tx_buffer_data(u8 *buffer,
					    struct drm_framebuffer *fb,
					    struct drm_rect clip,
					    u32 pitch,
					    struct drm_format_conv_state *fmtcnv_state)
{
	int ret;
	struct iosys_map dst, vmap;
	struct drm_gem_dma_object *dma_obj = drm_fb_dma_get_gem_obj(fb, 0);

	ret = drm_gem_fb_begin_cpu_access(fb, DMA_FROM_DEVICE);
	if (ret)
		return;

	iosys_map_set_vaddr(&dst, buffer);
	iosys_map_set_vaddr(&vmap, dma_obj->vaddr);

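	/*
	 * Convert the clip region from XRGB8888 to 1 bpp mono. The destination
	 * pitch leaves room for each line's address byte and trailing dummy
	 * period in the transmit buffer.
	 */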
	drm_fb_xrgb8888_to_mono(&dst, &pitch, &vmap, fb, &clip, fmtcnv_state);

	drm_gem_fb_end_cpu_access(fb, DMA_FROM_DEVICE);
}

static int sharp_memory_update_display(struct sharp_memory_device *smd,
				       struct drm_framebuffer *fb,
				       struct drm_rect clip,
				       struct drm_format_conv_state *fmtcnv_state)
{
	int ret;
	u32 pitch = smd->pitch;
	u8 vcom = smd->vcom;
	u8 *tx_buffer = smd->tx_buffer;
	u32 tx_buffer_size = smd->tx_buffer_size;

	mutex_lock(&smd->tx_mutex);

	/* Populate the transmit buffer with frame data */
	sharp_memory_set_tx_buffer_mode(&tx_buffer[0],
					SHARP_MEMORY_DISPLAY_UPDATE_MODE, vcom);
	sharp_memory_set_tx_buffer_addresses(&tx_buffer[1], clip, pitch);
	sharp_memory_set_tx_buffer_data(&tx_buffer[2], fb, clip, pitch, fmtcnv_state);

	ret = sharp_memory_spi_write(smd->spi, tx_buffer, tx_buffer_size);

	mutex_unlock(&smd->tx_mutex);

	return ret;
}

static int sharp_memory_maintain_display(struct sharp_memory_device *smd)
{
	int ret;
	u8 vcom = smd->vcom;
	u8 *tx_buffer = smd->tx_buffer;

	mutex_lock(&smd->tx_mutex);

	sharp_memory_set_tx_buffer_mode(&tx_buffer[0], SHARP_MEMORY_DISPLAY_MAINTAIN_MODE, vcom);
	tx_buffer[1] = 0; /* Write dummy data */
	ret = sharp_memory_spi_write(smd->spi, tx_buffer, 2);

	mutex_unlock(&smd->tx_mutex);

	return ret;
}

static int sharp_memory_clear_display(struct sharp_memory_device *smd)
{
	int ret;
	u8 vcom = smd->vcom;
	u8 *tx_buffer = smd->tx_buffer;

	mutex_lock(&smd->tx_mutex);

	sharp_memory_set_tx_buffer_mode(&tx_buffer[0], SHARP_MEMORY_DISPLAY_CLEAR_MODE, vcom);
	tx_buffer[1] = 0; /* Write dummy data */
	ret = sharp_memory_spi_write(smd->spi, tx_buffer, 2);

	mutex_unlock(&smd->tx_mutex);

	return ret;
}

static void sharp_memory_fb_dirty(struct drm_framebuffer *fb, struct drm_rect *rect,
				  struct drm_format_conv_state *fmtconv_state)
{
	struct drm_rect clip;
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(fb->dev);

	/* Always update a full line regardless of what is dirty */
	clip.x1 = 0;
	clip.x2 = fb->width;
	clip.y1 = rect->y1;
	clip.y2 = rect->y2;

	sharp_memory_update_display(smd, fb, clip, fmtconv_state);
}

static int sharp_memory_plane_atomic_check(struct drm_plane *plane,
					   struct drm_atomic_state *state)
{
	struct drm_plane_state *plane_state = drm_atomic_get_new_plane_state(state, plane);
	struct sharp_memory_device *smd;
	struct drm_crtc_state *crtc_state;

	smd = container_of(plane, struct sharp_memory_device, plane);
	crtc_state = drm_atomic_get_new_crtc_state(state, &smd->crtc);

	return drm_atomic_helper_check_plane_state(plane_state, crtc_state,
						   DRM_PLANE_NO_SCALING,
						   DRM_PLANE_NO_SCALING,
						   false, false);
}

static void sharp_memory_plane_atomic_update(struct drm_plane *plane,
					     struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state, plane);
	struct drm_plane_state *plane_state = plane->state;
	struct drm_format_conv_state fmtcnv_state = DRM_FORMAT_CONV_STATE_INIT;
	struct sharp_memory_device *smd;
	struct drm_rect rect;

	smd = container_of(plane, struct sharp_memory_device, plane);
	if (!smd->crtc.state->active)
		return;

	if (drm_atomic_helper_damage_merged(old_state, plane_state, &rect))
		sharp_memory_fb_dirty(plane_state->fb, &rect, &fmtcnv_state);

	drm_format_conv_state_release(&fmtcnv_state);
}

static const struct drm_plane_helper_funcs sharp_memory_plane_helper_funcs = {
	.prepare_fb = drm_gem_plane_helper_prepare_fb,
	.atomic_check = sharp_memory_plane_atomic_check,
	.atomic_update = sharp_memory_plane_atomic_update,
};

static bool sharp_memory_format_mod_supported(struct drm_plane *plane,
					      u32 format,
					      u64 modifier)
{
	return modifier == DRM_FORMAT_MOD_LINEAR;
}

static const struct drm_plane_funcs sharp_memory_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = drm_plane_cleanup,
	.reset = drm_atomic_helper_plane_reset,
	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
	.format_mod_supported = sharp_memory_format_mod_supported,
};

static enum drm_mode_status sharp_memory_crtc_mode_valid(struct drm_crtc *crtc,
							 const struct drm_display_mode *mode)
{
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(crtc->dev);

	return drm_crtc_helper_mode_valid_fixed(crtc, mode, smd->mode);
}

static int sharp_memory_crtc_check(struct drm_crtc *crtc,
				   struct drm_atomic_state *state)
{
	struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state, crtc);
	int ret;

	if (!crtc_state->enable)
		goto out;

	ret = drm_atomic_helper_check_crtc_primary_plane(crtc_state);
	if (ret)
		return ret;

out:
	return drm_atomic_add_affected_planes(state, crtc);
}

static int sharp_memory_sw_vcom_signal_thread(void *data)
{
	struct sharp_memory_device *smd = data;

	while (!kthread_should_stop()) {
		smd->vcom ^= 1; /* Toggle vcom */
		sharp_memory_maintain_display(smd);
		msleep(1000);
	}

	return 0;
}

static void sharp_memory_crtc_enable(struct drm_crtc *crtc,
				     struct drm_atomic_state *state)
{
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(crtc->dev);

	sharp_memory_clear_display(smd);

	if (smd->enable_gpio)
		gpiod_set_value(smd->enable_gpio, 1);
}

static void sharp_memory_crtc_disable(struct drm_crtc *crtc,
				      struct drm_atomic_state *state)
{
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(crtc->dev);

	sharp_memory_clear_display(smd);

	if (smd->enable_gpio)
		gpiod_set_value(smd->enable_gpio, 0);
}

static const struct drm_crtc_helper_funcs sharp_memory_crtc_helper_funcs = {
	.mode_valid = sharp_memory_crtc_mode_valid,
	.atomic_check = sharp_memory_crtc_check,
	.atomic_enable = sharp_memory_crtc_enable,
	.atomic_disable = sharp_memory_crtc_disable,
};

static const struct drm_crtc_funcs sharp_memory_crtc_funcs = {
	.reset = drm_atomic_helper_crtc_reset,
	.destroy = drm_crtc_cleanup,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
};

static const struct drm_encoder_funcs sharp_memory_encoder_funcs = {
	.destroy = drm_encoder_cleanup,
};

static int sharp_memory_connector_get_modes(struct drm_connector *connector)
{
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(connector->dev);

	return drm_connector_helper_get_modes_fixed(connector, smd->mode);
}

static const struct drm_connector_helper_funcs sharp_memory_connector_hfuncs = {
	.get_modes = sharp_memory_connector_get_modes,
};

static const struct drm_connector_funcs sharp_memory_connector_funcs = {
	.reset = drm_atomic_helper_connector_reset,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = drm_connector_cleanup,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};

static const struct drm_mode_config_funcs sharp_memory_mode_config_funcs = {
	.fb_create = drm_gem_fb_create_with_dirty,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = drm_atomic_helper_commit,
};

static const struct drm_display_mode sharp_memory_ls010b7dh04_mode = {
	DRM_SIMPLE_MODE(128, 128, 18, 18),
};

static const struct drm_display_mode sharp_memory_ls011b7dh03_mode = {
	DRM_SIMPLE_MODE(160, 68, 25, 10),
};

static const struct drm_display_mode sharp_memory_ls012b7dd01_mode = {
	DRM_SIMPLE_MODE(184, 38, 29, 6),
};

static const struct drm_display_mode sharp_memory_ls013b7dh03_mode = {
	DRM_SIMPLE_MODE(128, 128, 23, 23),
};

static const struct drm_display_mode sharp_memory_ls013b7dh05_mode = {
	DRM_SIMPLE_MODE(144, 168, 20, 24),
};

static const struct drm_display_mode sharp_memory_ls018b7dh02_mode = {
	DRM_SIMPLE_MODE(230, 303, 27, 36),
};

static const struct drm_display_mode sharp_memory_ls027b7dh01_mode = {
	DRM_SIMPLE_MODE(400, 240, 58, 35),
};

static const struct drm_display_mode sharp_memory_ls032b7dd02_mode = {
	DRM_SIMPLE_MODE(336, 536, 42, 68),
};

static const struct drm_display_mode sharp_memory_ls044q7dh01_mode = {
	DRM_SIMPLE_MODE(320, 240, 89, 67),
};

static const struct spi_device_id sharp_memory_ids[] = {
	{"ls010b7dh04", (kernel_ulong_t)&sharp_memory_ls010b7dh04_mode},
	{"ls011b7dh03", (kernel_ulong_t)&sharp_memory_ls011b7dh03_mode},
	{"ls012b7dd01", (kernel_ulong_t)&sharp_memory_ls012b7dd01_mode},
	{"ls013b7dh03", (kernel_ulong_t)&sharp_memory_ls013b7dh03_mode},
	{"ls013b7dh05", (kernel_ulong_t)&sharp_memory_ls013b7dh05_mode},
	{"ls018b7dh02", (kernel_ulong_t)&sharp_memory_ls018b7dh02_mode},
	{"ls027b7dh01", (kernel_ulong_t)&sharp_memory_ls027b7dh01_mode},
	{"ls027b7dh01a", (kernel_ulong_t)&sharp_memory_ls027b7dh01_mode},
	{"ls032b7dd02", (kernel_ulong_t)&sharp_memory_ls032b7dd02_mode},
	{"ls044q7dh01", (kernel_ulong_t)&sharp_memory_ls044q7dh01_mode},
	{},
};
MODULE_DEVICE_TABLE(spi, sharp_memory_ids);

static const struct of_device_id sharp_memory_of_match[] = {
	{.compatible = "sharp,ls010b7dh04", &sharp_memory_ls010b7dh04_mode},
	{.compatible = "sharp,ls011b7dh03", &sharp_memory_ls011b7dh03_mode},
	{.compatible = "sharp,ls012b7dd01", &sharp_memory_ls012b7dd01_mode},
	{.compatible = "sharp,ls013b7dh03", &sharp_memory_ls013b7dh03_mode},
	{.compatible = "sharp,ls013b7dh05", &sharp_memory_ls013b7dh05_mode},
	{.compatible = "sharp,ls018b7dh02", &sharp_memory_ls018b7dh02_mode},
	{.compatible = "sharp,ls027b7dh01", &sharp_memory_ls027b7dh01_mode},
	{.compatible = "sharp,ls027b7dh01a", &sharp_memory_ls027b7dh01_mode},
	{.compatible = "sharp,ls032b7dd02", &sharp_memory_ls032b7dd02_mode},
	{.compatible = "sharp,ls044q7dh01", &sharp_memory_ls044q7dh01_mode},
	{},
};
MODULE_DEVICE_TABLE(of, sharp_memory_of_match);

static const u32 sharp_memory_formats[] = {
	DRM_FORMAT_XRGB8888,
};

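/*
 * Register the single, fixed display pipeline: one primary plane feeding
 * one CRTC, exposed through a virtual (NONE) encoder and an SPI connector.
 */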
static int sharp_memory_pipe_init(struct drm_device *dev,
				  struct sharp_memory_device *smd,
				  const u32 *formats, unsigned int format_count,
				  const u64 *format_modifiers)
{
	int ret;
	struct drm_encoder *encoder = &smd->encoder;
	struct drm_plane *plane = &smd->plane;
	struct drm_crtc *crtc = &smd->crtc;
	struct drm_connector *connector = &smd->connector;

	drm_plane_helper_add(plane, &sharp_memory_plane_helper_funcs);
	ret = drm_universal_plane_init(dev, plane, 0,
				       &sharp_memory_plane_funcs,
				       formats, format_count,
				       format_modifiers,
				       DRM_PLANE_TYPE_PRIMARY, NULL);
	if (ret)
		return ret;

	drm_crtc_helper_add(crtc, &sharp_memory_crtc_helper_funcs);
	ret = drm_crtc_init_with_planes(dev, crtc, plane, NULL,
					&sharp_memory_crtc_funcs, NULL);
	if (ret)
		return ret;

	encoder->possible_crtcs = drm_crtc_mask(crtc);
	ret = drm_encoder_init(dev, encoder, &sharp_memory_encoder_funcs,
			       DRM_MODE_ENCODER_NONE, NULL);
	if (ret)
		return ret;

	ret = drm_connector_init(&smd->drm, &smd->connector,
				 &sharp_memory_connector_funcs,
				 DRM_MODE_CONNECTOR_SPI);
	if (ret)
		return ret;

	drm_connector_helper_add(&smd->connector,
				 &sharp_memory_connector_hfuncs);

	return drm_connector_attach_encoder(connector, encoder);
}

static int sharp_memory_init_pwm_vcom_signal(struct sharp_memory_device *smd)
{
	int ret;
	struct device *dev = &smd->spi->dev;
	struct pwm_state pwm_state;

	smd->pwm_vcom_signal = devm_pwm_get(dev, NULL);
	if (IS_ERR(smd->pwm_vcom_signal))
		return dev_err_probe(dev, PTR_ERR(smd->pwm_vcom_signal),
				     "Could not get pwm device\n");

	pwm_init_state(smd->pwm_vcom_signal, &pwm_state);
	pwm_set_relative_duty_cycle(&pwm_state, 1, 10);
	pwm_state.enabled = true;
	ret = pwm_apply_might_sleep(smd->pwm_vcom_signal, &pwm_state);
	if (ret)
		return dev_err_probe(dev, -EINVAL, "Could not apply pwm state\n");

	return 0;
}

static int sharp_memory_probe(struct spi_device *spi)
{
	int ret;
	struct device *dev;
	struct sharp_memory_device *smd;
	struct drm_device *drm;
	const char *vcom_mode_str;

	dev = &spi->dev;

	ret = spi_setup(spi);
	if (ret < 0)
		return dev_err_probe(dev, ret, "Failed to setup spi device\n");

	if (!dev->coherent_dma_mask) {
		ret = dma_coerce_mask_and_coherent(dev, DMA_BIT_MASK(32));
		if (ret)
			return dev_err_probe(dev, ret, "Failed to set dma mask\n");
	}

	smd = devm_drm_dev_alloc(dev, &sharp_memory_drm_driver,
				 struct sharp_memory_device, drm);
	if (IS_ERR(smd))
		return PTR_ERR(smd);

	spi_set_drvdata(spi, smd);

	smd->spi = spi;
	drm = &smd->drm;
	ret = drmm_mode_config_init(drm);
	if (ret)
		return dev_err_probe(dev, ret, "Failed to initialize drm config\n");

	smd->enable_gpio = devm_gpiod_get_optional(dev, "enable", GPIOD_OUT_HIGH);
	if (!smd->enable_gpio)
		dev_warn(dev, "Enable gpio not defined\n");

	drm->mode_config.funcs = &sharp_memory_mode_config_funcs;
	smd->mode = spi_get_device_match_data(spi);

	smd->pitch = (SHARP_ADDR_PERIOD + smd->mode->hdisplay + SHARP_DUMMY_PERIOD) / 8;
	smd->tx_buffer_size = (SHARP_MODE_PERIOD +
			       (SHARP_ADDR_PERIOD + (smd->mode->hdisplay) + SHARP_DUMMY_PERIOD) *
			       smd->mode->vdisplay) / 8;

	smd->tx_buffer = devm_kzalloc(dev, smd->tx_buffer_size, GFP_KERNEL);
	if (!smd->tx_buffer)
		return -ENOMEM;

	mutex_init(&smd->tx_mutex);

	/*
	 * VCOM is a signal that prevents DC bias from being built up in
	 * the panel, which would result in pixels being forever stuck in
	 * one state.
	 *
	 * This driver supports three different methods to generate this
	 * signal, depending on the EXTMODE pin:
	 *
	 * software (EXTMODE = L) - This mode uses a kthread to
	 * periodically send a "maintain display" message to the display,
	 * toggling the vcom bit on and off with each message
	 *
	 * external (EXTMODE = H) - This mode relies on an external
	 * clock to generate the signal on the EXTCOMM pin
	 *
	 * pwm (EXTMODE = H) - This mode uses a pwm device to generate
	 * the signal on the EXTCOMM pin
	 */
	if (device_property_read_string(dev, "sharp,vcom-mode", &vcom_mode_str))
		return dev_err_probe(dev, -EINVAL,
				     "Unable to find sharp,vcom-mode node in device tree\n");

	if (!strcmp("software", vcom_mode_str)) {
		smd->vcom_mode = SHARP_MEMORY_SOFTWARE_VCOM;
		smd->sw_vcom_signal = kthread_run(sharp_memory_sw_vcom_signal_thread,
						  smd, "sw_vcom_signal");

	} else if (!strcmp("external", vcom_mode_str)) {
		smd->vcom_mode = SHARP_MEMORY_EXTERNAL_VCOM;

	} else if (!strcmp("pwm", vcom_mode_str)) {
		smd->vcom_mode = SHARP_MEMORY_PWM_VCOM;
		ret = sharp_memory_init_pwm_vcom_signal(smd);
		if (ret)
			return ret;
	} else {
		return dev_err_probe(dev, -EINVAL, "Invalid value set for vcom-mode\n");
	}

	drm->mode_config.min_width = smd->mode->hdisplay;
	drm->mode_config.max_width = smd->mode->hdisplay;
	drm->mode_config.min_height = smd->mode->vdisplay;
	drm->mode_config.max_height = smd->mode->vdisplay;

	ret = sharp_memory_pipe_init(drm, smd, sharp_memory_formats,
				     ARRAY_SIZE(sharp_memory_formats),
				     NULL);
	if (ret)
		return dev_err_probe(dev, ret, "Failed to initialize display pipeline.\n");

	drm_plane_enable_fb_damage_clips(&smd->plane);
	drm_mode_config_reset(drm);

	ret = drm_dev_register(drm, 0);
	if (ret)
		return dev_err_probe(dev, ret, "Failed to register drm device.\n");

	drm_client_setup(drm, NULL);

	return 0;
}

static void sharp_memory_remove(struct spi_device *spi)
{
	struct sharp_memory_device *smd = spi_get_drvdata(spi);

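	/*
	 * Unplug the DRM device so userspace can no longer reach it before
	 * the display is shut down and the vcom signal source is stopped.
	 */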
	drm_dev_unplug(&smd->drm);
	drm_atomic_helper_shutdown(&smd->drm);

	switch (smd->vcom_mode) {
	case SHARP_MEMORY_SOFTWARE_VCOM:
		kthread_stop(smd->sw_vcom_signal);
		break;

	case SHARP_MEMORY_EXTERNAL_VCOM:
		break;

	case SHARP_MEMORY_PWM_VCOM:
		pwm_disable(smd->pwm_vcom_signal);
		break;
	}
}

static struct spi_driver sharp_memory_spi_driver = {
	.driver = {
		.name = "sharp_memory",
		.of_match_table = sharp_memory_of_match,
	},
	.probe = sharp_memory_probe,
	.remove = sharp_memory_remove,
	.id_table = sharp_memory_ids,
};
module_spi_driver(sharp_memory_spi_driver);

MODULE_AUTHOR("Alex Lanzano <lanzano.alex@gmail.com>");
MODULE_DESCRIPTION("SPI Protocol driver for the sharp_memory display");
MODULE_LICENSE("GPL");