// SPDX-License-Identifier: (GPL-2.0-only OR BSD-2-Clause)

#include <drm/clients/drm_client_setup.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_connector.h>
#include <drm/drm_damage_helper.h>
#include <drm/drm_drv.h>
#include <drm/drm_fb_dma_helper.h>
#include <drm/drm_fbdev_dma.h>
#include <drm/drm_format_helper.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_atomic_helper.h>
#include <drm/drm_gem_dma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_managed.h>
#include <drm/drm_modes.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_rect.h>
#include <linux/bitrev.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/gpio/consumer.h>
#include <linux/kthread.h>
#include <linux/mod_devicetable.h>
#include <linux/module.h>
#include <linux/mutex.h>
#include <linux/property.h>
#include <linux/pwm.h>
#include <linux/spi/spi.h>

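/*
 * Bit counts used to size an SPI transfer: each frame starts with an 8-bit
 * mode/command word, and every line of pixel data is prefixed by an 8-bit
 * line address and followed by 8 dummy bits.
 */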
#define SHARP_MODE_PERIOD 8
#define SHARP_ADDR_PERIOD 8
#define SHARP_DUMMY_PERIOD 8

#define SHARP_MEMORY_DISPLAY_MAINTAIN_MODE 0
#define SHARP_MEMORY_DISPLAY_UPDATE_MODE 1
#define SHARP_MEMORY_DISPLAY_CLEAR_MODE 4

enum sharp_memory_model {
	LS010B7DH04,
	LS011B7DH03,
	LS012B7DD01,
	LS013B7DH03,
	LS013B7DH05,
	LS018B7DH02,
	LS027B7DH01,
	LS027B7DH01A,
	LS032B7DD02,
	LS044Q7DH01,
};

enum sharp_memory_vcom_mode {
	SHARP_MEMORY_SOFTWARE_VCOM,
	SHARP_MEMORY_EXTERNAL_VCOM,
	SHARP_MEMORY_PWM_VCOM
};

struct sharp_memory_device {
	struct drm_device drm;
	struct spi_device *spi;

	const struct drm_display_mode *mode;

	struct drm_crtc crtc;
	struct drm_plane plane;
	struct drm_encoder encoder;
	struct drm_connector connector;

	struct gpio_desc *enable_gpio;

	struct task_struct *sw_vcom_signal;
	struct pwm_device *pwm_vcom_signal;

	enum sharp_memory_vcom_mode vcom_mode;
	u8 vcom;

	u32 pitch;
	u32 tx_buffer_size;
	u8 *tx_buffer;

	/*
	 * When vcom_mode == "software" a kthread is used to periodically send
	 * a 'maintain display' message over spi. This mutex ensures tx_buffer
	 * access and spi bus usage is synchronized in this case.
	 */
	struct mutex tx_mutex;
};

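/*
 * The SPI core shifts bytes out MSB first by default, while the panel samples
 * the mode, address and pixel bits in the opposite order, so every byte is
 * bit-reversed in place before it is written to the bus.
 */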
static inline int sharp_memory_spi_write(struct spi_device *spi, void *buf, size_t len)
{
	/* Reverse the bit order */
	for (u8 *b = buf; b < ((u8 *)buf) + len; ++b)
		*b = bitrev8(*b);

	return spi_write(spi, buf, len);
}

static inline struct sharp_memory_device *drm_to_sharp_memory_device(struct drm_device *drm)
{
	return container_of(drm, struct sharp_memory_device, drm);
}

DEFINE_DRM_GEM_DMA_FOPS(sharp_memory_fops);

static const struct drm_driver sharp_memory_drm_driver = {
	.driver_features = DRIVER_GEM | DRIVER_MODESET | DRIVER_ATOMIC,
	.fops = &sharp_memory_fops,
	DRM_GEM_DMA_DRIVER_OPS_VMAP,
	DRM_FBDEV_DMA_DRIVER_OPS,
	.name = "sharp_memory_display",
	.desc = "Sharp Display Memory LCD",
	.major = 1,
	.minor = 0,
};

static inline void sharp_memory_set_tx_buffer_mode(u8 *buffer, u8 mode, u8 vcom)
{
	*buffer = mode | (vcom << 1);
}

static inline void sharp_memory_set_tx_buffer_addresses(u8 *buffer,
							 struct drm_rect clip,
							 u32 pitch)
{
	for (u32 line = 0; line < clip.y2; ++line)
		buffer[line * pitch] = line + 1;
}

static void sharp_memory_set_tx_buffer_data(u8 *buffer,
					    struct drm_framebuffer *fb,
					    struct drm_rect clip,
					    u32 pitch,
					    struct drm_format_conv_state *fmtcnv_state)
{
	int ret;
	struct iosys_map dst, vmap;
	struct drm_gem_dma_object *dma_obj = drm_fb_dma_get_gem_obj(fb, 0);

	ret = drm_gem_fb_begin_cpu_access(fb, DMA_FROM_DEVICE);
	if (ret)
		return;

	iosys_map_set_vaddr(&dst, buffer);
	iosys_map_set_vaddr(&vmap, dma_obj->vaddr);

	drm_fb_xrgb8888_to_mono(&dst, &pitch, &vmap, fb, &clip, fmtcnv_state);

	drm_gem_fb_end_cpu_access(fb, DMA_FROM_DEVICE);
}

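/*
 * Transmit buffer layout, as assembled below: byte 0 holds the mode/vcom
 * word, then each display line occupies 'pitch' bytes made up of a one-based
 * line address byte, hdisplay / 8 bytes of monochrome pixel data and a
 * trailing dummy byte that is left zeroed.
 */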
static int sharp_memory_update_display(struct sharp_memory_device *smd,
				       struct drm_framebuffer *fb,
				       struct drm_rect clip,
				       struct drm_format_conv_state *fmtcnv_state)
{
	int ret;
	u32 pitch = smd->pitch;
	u8 vcom = smd->vcom;
	u8 *tx_buffer = smd->tx_buffer;
	u32 tx_buffer_size = smd->tx_buffer_size;

	mutex_lock(&smd->tx_mutex);

	/* Populate the transmit buffer with frame data */
	sharp_memory_set_tx_buffer_mode(&tx_buffer[0],
					SHARP_MEMORY_DISPLAY_UPDATE_MODE, vcom);
	sharp_memory_set_tx_buffer_addresses(&tx_buffer[1], clip, pitch);
	sharp_memory_set_tx_buffer_data(&tx_buffer[2], fb, clip, pitch, fmtcnv_state);

	ret = sharp_memory_spi_write(smd->spi, tx_buffer, tx_buffer_size);

	mutex_unlock(&smd->tx_mutex);

	return ret;
}

static int sharp_memory_maintain_display(struct sharp_memory_device *smd)
{
	int ret;
	u8 vcom = smd->vcom;
	u8 *tx_buffer = smd->tx_buffer;

	mutex_lock(&smd->tx_mutex);

	sharp_memory_set_tx_buffer_mode(&tx_buffer[0], SHARP_MEMORY_DISPLAY_MAINTAIN_MODE, vcom);
	tx_buffer[1] = 0; /* Write dummy data */
	ret = sharp_memory_spi_write(smd->spi, tx_buffer, 2);

	mutex_unlock(&smd->tx_mutex);

	return ret;
}

static int sharp_memory_clear_display(struct sharp_memory_device *smd)
{
	int ret;
	u8 vcom = smd->vcom;
	u8 *tx_buffer = smd->tx_buffer;

	mutex_lock(&smd->tx_mutex);

	sharp_memory_set_tx_buffer_mode(&tx_buffer[0], SHARP_MEMORY_DISPLAY_CLEAR_MODE, vcom);
	tx_buffer[1] = 0; /* Write dummy data */
	ret = sharp_memory_spi_write(smd->spi, tx_buffer, 2);

	mutex_unlock(&smd->tx_mutex);

	return ret;
}

static void sharp_memory_fb_dirty(struct drm_framebuffer *fb, struct drm_rect *rect,
				  struct drm_format_conv_state *fmtconv_state)
{
	struct drm_rect clip;
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(fb->dev);

	/* Always update a full line regardless of what is dirty */
	clip.x1 = 0;
	clip.x2 = fb->width;
	clip.y1 = rect->y1;
	clip.y2 = rect->y2;

	sharp_memory_update_display(smd, fb, clip, fmtconv_state);
}

static int sharp_memory_plane_atomic_check(struct drm_plane *plane,
					   struct drm_atomic_state *state)
{
	struct drm_plane_state *plane_state = drm_atomic_get_new_plane_state(state, plane);
	struct sharp_memory_device *smd;
	struct drm_crtc_state *crtc_state;

	smd = container_of(plane, struct sharp_memory_device, plane);
	crtc_state = drm_atomic_get_new_crtc_state(state, &smd->crtc);

	return drm_atomic_helper_check_plane_state(plane_state, crtc_state,
						   DRM_PLANE_NO_SCALING,
						   DRM_PLANE_NO_SCALING,
						   false, false);
}

static void sharp_memory_plane_atomic_update(struct drm_plane *plane,
					     struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state, plane);
	struct drm_plane_state *plane_state = plane->state;
	struct drm_format_conv_state fmtcnv_state = DRM_FORMAT_CONV_STATE_INIT;
	struct sharp_memory_device *smd;
	struct drm_rect rect;

	smd = container_of(plane, struct sharp_memory_device, plane);
	if (!smd->crtc.state->active)
		return;

	if (drm_atomic_helper_damage_merged(old_state, plane_state, &rect))
		sharp_memory_fb_dirty(plane_state->fb, &rect, &fmtcnv_state);

	drm_format_conv_state_release(&fmtcnv_state);
}

static const struct drm_plane_helper_funcs sharp_memory_plane_helper_funcs = {
	.prepare_fb = drm_gem_plane_helper_prepare_fb,
	.atomic_check = sharp_memory_plane_atomic_check,
	.atomic_update = sharp_memory_plane_atomic_update,
};

static bool sharp_memory_format_mod_supported(struct drm_plane *plane,
					      u32 format,
					      u64 modifier)
{
	return modifier == DRM_FORMAT_MOD_LINEAR;
}

static const struct drm_plane_funcs sharp_memory_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = drm_plane_cleanup,
	.reset = drm_atomic_helper_plane_reset,
	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
	.format_mod_supported = sharp_memory_format_mod_supported,
};

static enum drm_mode_status sharp_memory_crtc_mode_valid(struct drm_crtc *crtc,
							  const struct drm_display_mode *mode)
{
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(crtc->dev);

	return drm_crtc_helper_mode_valid_fixed(crtc, mode, smd->mode);
}

static int sharp_memory_crtc_check(struct drm_crtc *crtc,
				   struct drm_atomic_state *state)
{
	struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state, crtc);
	int ret;

	if (!crtc_state->enable)
		goto out;

	ret = drm_atomic_helper_check_crtc_primary_plane(crtc_state);
	if (ret)
		return ret;

out:
	return drm_atomic_add_affected_planes(state, crtc);
}

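/*
 * Software VCOM: toggle the vcom bit and push a 'maintain display' command
 * roughly once per second so the panel keeps seeing an alternating polarity
 * even when no frame updates are being sent.
 */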
static int sharp_memory_sw_vcom_signal_thread(void *data)
{
	struct sharp_memory_device *smd = data;

	while (!kthread_should_stop()) {
		smd->vcom ^= 1; /* Toggle vcom */
		sharp_memory_maintain_display(smd);
		msleep(1000);
	}

	return 0;
}

static void sharp_memory_crtc_enable(struct drm_crtc *crtc,
				     struct drm_atomic_state *state)
{
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(crtc->dev);

	sharp_memory_clear_display(smd);

	if (smd->enable_gpio)
		gpiod_set_value(smd->enable_gpio, 1);
}

static void sharp_memory_crtc_disable(struct drm_crtc *crtc,
				      struct drm_atomic_state *state)
{
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(crtc->dev);

	sharp_memory_clear_display(smd);

	if (smd->enable_gpio)
		gpiod_set_value(smd->enable_gpio, 0);
}

static const struct drm_crtc_helper_funcs sharp_memory_crtc_helper_funcs = {
	.mode_valid = sharp_memory_crtc_mode_valid,
	.atomic_check = sharp_memory_crtc_check,
	.atomic_enable = sharp_memory_crtc_enable,
	.atomic_disable = sharp_memory_crtc_disable,
};

static const struct drm_crtc_funcs sharp_memory_crtc_funcs = {
	.reset = drm_atomic_helper_crtc_reset,
	.destroy = drm_crtc_cleanup,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
};

static const struct drm_encoder_funcs sharp_memory_encoder_funcs = {
	.destroy = drm_encoder_cleanup,
};

static int sharp_memory_connector_get_modes(struct drm_connector *connector)
{
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(connector->dev);

	return drm_connector_helper_get_modes_fixed(connector, smd->mode);
}

static const struct drm_connector_helper_funcs sharp_memory_connector_hfuncs = {
	.get_modes = sharp_memory_connector_get_modes,
};

static const struct drm_connector_funcs sharp_memory_connector_funcs = {
	.reset = drm_atomic_helper_connector_reset,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = drm_connector_cleanup,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};

static const struct drm_mode_config_funcs sharp_memory_mode_config_funcs = {
	.fb_create = drm_gem_fb_create_with_dirty,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = drm_atomic_helper_commit,
};

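/*
 * Fixed panel timings. The DRM_SIMPLE_MODE() arguments are the active
 * resolution in pixels followed by the physical panel size in millimetres.
 */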
static const struct drm_display_mode sharp_memory_ls010b7dh04_mode = {
	DRM_SIMPLE_MODE(128, 128, 18, 18),
};

static const struct drm_display_mode sharp_memory_ls011b7dh03_mode = {
	DRM_SIMPLE_MODE(160, 68, 25, 10),
};

static const struct drm_display_mode sharp_memory_ls012b7dd01_mode = {
	DRM_SIMPLE_MODE(184, 38, 29, 6),
};

static const struct drm_display_mode sharp_memory_ls013b7dh03_mode = {
	DRM_SIMPLE_MODE(128, 128, 23, 23),
};

static const struct drm_display_mode sharp_memory_ls013b7dh05_mode = {
	DRM_SIMPLE_MODE(144, 168, 20, 24),
};

static const struct drm_display_mode sharp_memory_ls018b7dh02_mode = {
	DRM_SIMPLE_MODE(230, 303, 27, 36),
};

static const struct drm_display_mode sharp_memory_ls027b7dh01_mode = {
	DRM_SIMPLE_MODE(400, 240, 58, 35),
};

static const struct drm_display_mode sharp_memory_ls032b7dd02_mode = {
	DRM_SIMPLE_MODE(336, 536, 42, 68),
};

static const struct drm_display_mode sharp_memory_ls044q7dh01_mode = {
	DRM_SIMPLE_MODE(320, 240, 89, 67),
};

static const struct spi_device_id sharp_memory_ids[] = {
	{"ls010b7dh04", (kernel_ulong_t)&sharp_memory_ls010b7dh04_mode},
	{"ls011b7dh03", (kernel_ulong_t)&sharp_memory_ls011b7dh03_mode},
	{"ls012b7dd01", (kernel_ulong_t)&sharp_memory_ls012b7dd01_mode},
	{"ls013b7dh03", (kernel_ulong_t)&sharp_memory_ls013b7dh03_mode},
	{"ls013b7dh05", (kernel_ulong_t)&sharp_memory_ls013b7dh05_mode},
	{"ls018b7dh02", (kernel_ulong_t)&sharp_memory_ls018b7dh02_mode},
	{"ls027b7dh01", (kernel_ulong_t)&sharp_memory_ls027b7dh01_mode},
	{"ls027b7dh01a", (kernel_ulong_t)&sharp_memory_ls027b7dh01_mode},
	{"ls032b7dd02", (kernel_ulong_t)&sharp_memory_ls032b7dd02_mode},
	{"ls044q7dh01", (kernel_ulong_t)&sharp_memory_ls044q7dh01_mode},
	{},
};
MODULE_DEVICE_TABLE(spi, sharp_memory_ids);

static const struct of_device_id sharp_memory_of_match[] = {
	{.compatible = "sharp,ls010b7dh04", .data = &sharp_memory_ls010b7dh04_mode},
	{.compatible = "sharp,ls011b7dh03", .data = &sharp_memory_ls011b7dh03_mode},
	{.compatible = "sharp,ls012b7dd01", .data = &sharp_memory_ls012b7dd01_mode},
	{.compatible = "sharp,ls013b7dh03", .data = &sharp_memory_ls013b7dh03_mode},
	{.compatible = "sharp,ls013b7dh05", .data = &sharp_memory_ls013b7dh05_mode},
	{.compatible = "sharp,ls018b7dh02", .data = &sharp_memory_ls018b7dh02_mode},
	{.compatible = "sharp,ls027b7dh01", .data = &sharp_memory_ls027b7dh01_mode},
	{.compatible = "sharp,ls027b7dh01a", .data = &sharp_memory_ls027b7dh01_mode},
	{.compatible = "sharp,ls032b7dd02", .data = &sharp_memory_ls032b7dd02_mode},
	{.compatible = "sharp,ls044q7dh01", .data = &sharp_memory_ls044q7dh01_mode},
	{},
};
MODULE_DEVICE_TABLE(of, sharp_memory_of_match);

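/*
 * Illustrative (not normative) devicetree usage. The node name, SPI bus
 * frequency and GPIO phandle below are assumptions; only the compatible
 * string and property names match what this driver parses:
 *
 *	display@0 {
 *		compatible = "sharp,ls027b7dh01";
 *		reg = <0>;
 *		spi-max-frequency = <2000000>;
 *		sharp,vcom-mode = "software";
 *		enable-gpios = <&gpio0 23 GPIO_ACTIVE_HIGH>;
 *	};
 */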
static const u32 sharp_memory_formats[] = {
	DRM_FORMAT_XRGB8888,
};

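/*
 * Wire up the single fixed pipeline: one primary plane feeding one CRTC,
 * exposed through a dummy encoder and an SPI connector.
 */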
static int sharp_memory_pipe_init(struct drm_device *dev,
				  struct sharp_memory_device *smd,
				  const u32 *formats, unsigned int format_count,
				  const u64 *format_modifiers)
{
	int ret;
	struct drm_encoder *encoder = &smd->encoder;
	struct drm_plane *plane = &smd->plane;
	struct drm_crtc *crtc = &smd->crtc;
	struct drm_connector *connector = &smd->connector;

	drm_plane_helper_add(plane, &sharp_memory_plane_helper_funcs);
	ret = drm_universal_plane_init(dev, plane, 0,
				       &sharp_memory_plane_funcs,
				       formats, format_count,
				       format_modifiers,
				       DRM_PLANE_TYPE_PRIMARY, NULL);
	if (ret)
		return ret;

	drm_crtc_helper_add(crtc, &sharp_memory_crtc_helper_funcs);
	ret = drm_crtc_init_with_planes(dev, crtc, plane, NULL,
					&sharp_memory_crtc_funcs, NULL);
	if (ret)
		return ret;

	encoder->possible_crtcs = drm_crtc_mask(crtc);
	ret = drm_encoder_init(dev, encoder, &sharp_memory_encoder_funcs,
			       DRM_MODE_ENCODER_NONE, NULL);
	if (ret)
		return ret;

	ret = drm_connector_init(&smd->drm, &smd->connector,
				 &sharp_memory_connector_funcs,
				 DRM_MODE_CONNECTOR_SPI);
	if (ret)
		return ret;

	drm_connector_helper_add(&smd->connector,
				 &sharp_memory_connector_hfuncs);

	return drm_connector_attach_encoder(connector, encoder);
}

static int sharp_memory_init_pwm_vcom_signal(struct sharp_memory_device *smd)
{
	int ret;
	struct device *dev = &smd->spi->dev;
	struct pwm_state pwm_state;

	smd->pwm_vcom_signal = devm_pwm_get(dev, NULL);
	if (IS_ERR(smd->pwm_vcom_signal))
		return dev_err_probe(dev, PTR_ERR(smd->pwm_vcom_signal),
				     "Could not get pwm device\n");

	pwm_init_state(smd->pwm_vcom_signal, &pwm_state);
	pwm_set_relative_duty_cycle(&pwm_state, 1, 10);
	pwm_state.enabled = true;
	ret = pwm_apply_might_sleep(smd->pwm_vcom_signal, &pwm_state);
	if (ret)
		return dev_err_probe(dev, ret, "Could not apply pwm state\n");

	return 0;
}

static int sharp_memory_probe(struct spi_device *spi)
{
	int ret;
	struct device *dev;
	struct sharp_memory_device *smd;
	struct drm_device *drm;
	const char *vcom_mode_str;

	dev = &spi->dev;

	ret = spi_setup(spi);
	if (ret < 0)
		return dev_err_probe(dev, ret, "Failed to setup spi device\n");

	if (!dev->coherent_dma_mask) {
		ret = dma_coerce_mask_and_coherent(dev, DMA_BIT_MASK(32));
		if (ret)
			return dev_err_probe(dev, ret, "Failed to set dma mask\n");
	}

	smd = devm_drm_dev_alloc(dev, &sharp_memory_drm_driver,
				 struct sharp_memory_device, drm);
	if (IS_ERR(smd))
		return PTR_ERR(smd);

	spi_set_drvdata(spi, smd);

	smd->spi = spi;
	drm = &smd->drm;
	ret = drmm_mode_config_init(drm);
	if (ret)
		return dev_err_probe(dev, ret, "Failed to initialize drm config\n");

	smd->enable_gpio = devm_gpiod_get_optional(dev, "enable", GPIOD_OUT_HIGH);
	if (IS_ERR(smd->enable_gpio))
		return dev_err_probe(dev, PTR_ERR(smd->enable_gpio),
				     "Failed to get enable gpio\n");
	if (!smd->enable_gpio)
		dev_warn(dev, "Enable gpio not defined\n");

	drm->mode_config.funcs = &sharp_memory_mode_config_funcs;
	smd->mode = spi_get_device_match_data(spi);

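	/*
	 * Per-line and whole-frame buffer sizes in bytes. For example, the
	 * 400x240 ls027b7dh01 panel works out to a pitch of
	 * (8 + 400 + 8) / 8 = 52 bytes per line and a transmit buffer of
	 * (8 + (8 + 400 + 8) * 240) / 8 = 12481 bytes.
	 */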
	smd->pitch = (SHARP_ADDR_PERIOD + smd->mode->hdisplay + SHARP_DUMMY_PERIOD) / 8;
	smd->tx_buffer_size = (SHARP_MODE_PERIOD +
			       (SHARP_ADDR_PERIOD + smd->mode->hdisplay + SHARP_DUMMY_PERIOD) *
			       smd->mode->vdisplay) / 8;

	smd->tx_buffer = devm_kzalloc(dev, smd->tx_buffer_size, GFP_KERNEL);
	if (!smd->tx_buffer)
		return -ENOMEM;

	mutex_init(&smd->tx_mutex);

	/*
	 * VCOM is a signal that prevents DC bias from building up in the
	 * panel, which would otherwise leave pixels permanently stuck in one
	 * state.
	 *
	 * This driver supports three different methods of generating this
	 * signal, depending on the EXTMODE pin:
	 *
	 * software (EXTMODE = L) - This mode uses a kthread to
	 * periodically send a "maintain display" message to the display,
	 * toggling the vcom bit on and off with each message
	 *
	 * external (EXTMODE = H) - This mode relies on an external
	 * clock to generate the signal on the EXTCOMM pin
	 *
	 * pwm (EXTMODE = H) - This mode uses a pwm device to generate
	 * the signal on the EXTCOMM pin
	 */
	if (device_property_read_string(dev, "sharp,vcom-mode", &vcom_mode_str))
		return dev_err_probe(dev, -EINVAL,
				     "Unable to read sharp,vcom-mode property\n");

	if (!strcmp("software", vcom_mode_str)) {
		smd->vcom_mode = SHARP_MEMORY_SOFTWARE_VCOM;
		smd->sw_vcom_signal = kthread_run(sharp_memory_sw_vcom_signal_thread,
						  smd, "sw_vcom_signal");
		if (IS_ERR(smd->sw_vcom_signal))
			return dev_err_probe(dev, PTR_ERR(smd->sw_vcom_signal),
					     "Failed to start sw vcom signal thread\n");

	} else if (!strcmp("external", vcom_mode_str)) {
		smd->vcom_mode = SHARP_MEMORY_EXTERNAL_VCOM;

	} else if (!strcmp("pwm", vcom_mode_str)) {
		smd->vcom_mode = SHARP_MEMORY_PWM_VCOM;
		ret = sharp_memory_init_pwm_vcom_signal(smd);
		if (ret)
			return ret;
	} else {
		return dev_err_probe(dev, -EINVAL, "Invalid value set for vcom-mode\n");
	}

	drm->mode_config.min_width = smd->mode->hdisplay;
	drm->mode_config.max_width = smd->mode->hdisplay;
	drm->mode_config.min_height = smd->mode->vdisplay;
	drm->mode_config.max_height = smd->mode->vdisplay;

	ret = sharp_memory_pipe_init(drm, smd, sharp_memory_formats,
				     ARRAY_SIZE(sharp_memory_formats),
				     NULL);
	if (ret)
		return dev_err_probe(dev, ret, "Failed to initialize display pipeline.\n");

	drm_plane_enable_fb_damage_clips(&smd->plane);
	drm_mode_config_reset(drm);

	ret = drm_dev_register(drm, 0);
	if (ret)
		return dev_err_probe(dev, ret, "Failed to register drm device.\n");

	drm_client_setup(drm, NULL);

	return 0;
}

static void sharp_memory_remove(struct spi_device *spi)
{
	struct sharp_memory_device *smd = spi_get_drvdata(spi);

	drm_dev_unplug(&smd->drm);
	drm_atomic_helper_shutdown(&smd->drm);

	switch (smd->vcom_mode) {
	case SHARP_MEMORY_SOFTWARE_VCOM:
		kthread_stop(smd->sw_vcom_signal);
		break;

	case SHARP_MEMORY_EXTERNAL_VCOM:
		break;

	case SHARP_MEMORY_PWM_VCOM:
		pwm_disable(smd->pwm_vcom_signal);
		break;
	}
}

static struct spi_driver sharp_memory_spi_driver = {
	.driver = {
		.name = "sharp_memory",
		.of_match_table = sharp_memory_of_match,
	},
	.probe = sharp_memory_probe,
	.remove = sharp_memory_remove,
	.id_table = sharp_memory_ids,
};
module_spi_driver(sharp_memory_spi_driver);

MODULE_AUTHOR("Alex Lanzano <lanzano.alex@gmail.com>");
MODULE_DESCRIPTION("SPI Protocol driver for the sharp_memory display");
MODULE_LICENSE("GPL");