xref: /linux/drivers/gpu/drm/tiny/sharp-memory.c (revision 7f71507851fc7764b36a3221839607d3a45c2025)
1 // SPDX-License-Identifier: (GPL-2.0-only OR BSD-2-Clause)
2 
3 #include <drm/drm_atomic.h>
4 #include <drm/drm_atomic_helper.h>
5 #include <drm/drm_client_setup.h>
6 #include <drm/drm_connector.h>
7 #include <drm/drm_damage_helper.h>
8 #include <drm/drm_drv.h>
9 #include <drm/drm_fb_dma_helper.h>
10 #include <drm/drm_fbdev_dma.h>
11 #include <drm/drm_format_helper.h>
12 #include <drm/drm_framebuffer.h>
13 #include <drm/drm_gem_atomic_helper.h>
14 #include <drm/drm_gem_dma_helper.h>
15 #include <drm/drm_gem_framebuffer_helper.h>
16 #include <drm/drm_managed.h>
17 #include <drm/drm_modes.h>
18 #include <drm/drm_probe_helper.h>
19 #include <drm/drm_rect.h>
20 #include <linux/bitrev.h>
21 #include <linux/delay.h>
22 #include <linux/gpio/consumer.h>
23 #include <linux/kthread.h>
24 #include <linux/mod_devicetable.h>
25 #include <linux/module.h>
26 #include <linux/mutex.h>
27 #include <linux/pwm.h>
28 #include <linux/spi/spi.h>
29 
/*
 * Widths, in bits, of the per-transfer fields: the leading mode byte,
 * the per-line address byte and the trailing dummy byte.
 */
#define SHARP_MODE_PERIOD 8
#define SHARP_ADDR_PERIOD 8
#define SHARP_DUMMY_PERIOD 8

/*
 * Command values for the mode byte. The mode occupies bits 0 and 2;
 * bit 1 carries the current VCOM level (see sharp_memory_set_tx_buffer_mode()).
 */
#define SHARP_MEMORY_DISPLAY_MAINTAIN_MODE 0
#define SHARP_MEMORY_DISPLAY_UPDATE_MODE 1
#define SHARP_MEMORY_DISPLAY_CLEAR_MODE 4
37 
/* Supported Sharp Memory LCD panel models (not otherwise referenced here) */
enum sharp_memory_model {
	LS010B7DH04,
	LS011B7DH03,
	LS012B7DD01,
	LS013B7DH03,
	LS013B7DH05,
	LS018B7DH02,
	LS027B7DH01,
	LS027B7DH01A,
	LS032B7DD02,
	LS044Q7DH01,
};

/* How the VCOM polarity signal is generated (selected via "sharp,vcom-mode") */
enum sharp_memory_vcom_mode {
	SHARP_MEMORY_SOFTWARE_VCOM,	/* kthread toggles vcom via maintain cmds */
	SHARP_MEMORY_EXTERNAL_VCOM,	/* external clock drives the signal */
	SHARP_MEMORY_PWM_VCOM		/* pwm device drives the signal */
};
56 
/* Per-device state; the drm_device is embedded so drm-managed allocation works */
struct sharp_memory_device {
	struct drm_device drm;
	struct spi_device *spi;

	/* Fixed display mode, from the spi/of match data */
	const struct drm_display_mode *mode;

	/* Minimal pipeline objects: plane -> crtc -> encoder -> connector */
	struct drm_crtc crtc;
	struct drm_plane plane;
	struct drm_encoder encoder;
	struct drm_connector connector;

	/* Optional panel enable line (may be NULL) */
	struct gpio_desc *enable_gpio;

	/* Only one of these is used, depending on vcom_mode */
	struct task_struct *sw_vcom_signal;
	struct pwm_device *pwm_vcom_signal;

	enum sharp_memory_vcom_mode vcom_mode;
	u8 vcom;	/* current VCOM level, 0 or 1 */

	u32 pitch;		/* bytes per line in tx_buffer (addr+data+dummy) */
	u32 tx_buffer_size;	/* full-frame transfer size in bytes */
	u8 *tx_buffer;

	/* When vcom_mode == "software" a kthread is used to periodically send a
	 * 'maintain display' message over spi. This mutex ensures tx_buffer access
	 * and spi bus usage is synchronized in this case.
	 */
	struct mutex tx_mutex;
};
86 
87 static inline int sharp_memory_spi_write(struct spi_device *spi, void *buf, size_t len)
88 {
89 	/* Reverse the bit order */
90 	for (u8 *b = buf; b < ((u8 *)buf) + len; ++b)
91 		*b = bitrev8(*b);
92 
93 	return spi_write(spi, buf, len);
94 }
95 
/* Resolve the embedded drm_device back to its containing sharp_memory_device */
static inline struct sharp_memory_device *drm_to_sharp_memory_device(struct drm_device *drm)
{
	return container_of(drm, struct sharp_memory_device, drm);
}
100 
/* Standard GEM DMA file operations */
DEFINE_DRM_GEM_DMA_FOPS(sharp_memory_fops);

static const struct drm_driver sharp_memory_drm_driver = {
	.driver_features	= DRIVER_GEM | DRIVER_MODESET | DRIVER_ATOMIC,
	.fops			= &sharp_memory_fops,
	DRM_GEM_DMA_DRIVER_OPS_VMAP,
	DRM_FBDEV_DMA_DRIVER_OPS,
	.name			= "sharp_memory_display",
	.desc			= "Sharp Display Memory LCD",
	.date			= "20231129",
	.major			= 1,
	.minor			= 0,
};
114 
/*
 * Compose the transfer's leading byte: @mode in bits 0/2, the current
 * VCOM level in bit 1.
 */
static inline void sharp_memory_set_tx_buffer_mode(u8 *buffer, u8 mode, u8 vcom)
{
	*buffer = mode | (vcom << 1);
}
119 
/*
 * Stamp a 1-based line address byte at the start of each @pitch-sized
 * line slot. @buffer points just past the mode byte.
 *
 * NOTE(review): the loop runs from line 0 to clip.y2 rather than from
 * clip.y1, so addresses are written for lines above the damage rect as
 * well — confirm this matches how sharp_memory_set_tx_buffer_data()
 * packs the pixel data for partial updates.
 */
static inline void sharp_memory_set_tx_buffer_addresses(u8 *buffer,
							struct drm_rect clip,
							u32 pitch)
{
	for (u32 line = 0; line < clip.y2; ++line)
		buffer[line * pitch] = line + 1;
}
127 
/*
 * Convert the XRGB8888 framebuffer region @clip to monochrome and pack
 * it into @buffer, one line per @pitch bytes.
 *
 * NOTE(review): a drm_gem_fb_begin_cpu_access() failure is silently
 * swallowed — the function returns and whatever was previously in the
 * tx buffer gets transmitted instead.
 */
static void sharp_memory_set_tx_buffer_data(u8 *buffer,
					    struct drm_framebuffer *fb,
					    struct drm_rect clip,
					    u32 pitch,
					    struct drm_format_conv_state *fmtcnv_state)
{
	int ret;
	struct iosys_map dst, vmap;
	struct drm_gem_dma_object *dma_obj = drm_fb_dma_get_gem_obj(fb, 0);

	ret = drm_gem_fb_begin_cpu_access(fb, DMA_FROM_DEVICE);
	if (ret)
		return;

	iosys_map_set_vaddr(&dst, buffer);
	iosys_map_set_vaddr(&vmap, dma_obj->vaddr);

	drm_fb_xrgb8888_to_mono(&dst, &pitch, &vmap, fb, &clip, fmtcnv_state);

	drm_gem_fb_end_cpu_access(fb, DMA_FROM_DEVICE);
}
149 
150 static int sharp_memory_update_display(struct sharp_memory_device *smd,
151 				       struct drm_framebuffer *fb,
152 				       struct drm_rect clip,
153 				       struct drm_format_conv_state *fmtcnv_state)
154 {
155 	int ret;
156 	u32 pitch = smd->pitch;
157 	u8 vcom = smd->vcom;
158 	u8 *tx_buffer = smd->tx_buffer;
159 	u32 tx_buffer_size = smd->tx_buffer_size;
160 
161 	mutex_lock(&smd->tx_mutex);
162 
163 	/* Populate the transmit buffer with frame data */
164 	sharp_memory_set_tx_buffer_mode(&tx_buffer[0],
165 					SHARP_MEMORY_DISPLAY_UPDATE_MODE, vcom);
166 	sharp_memory_set_tx_buffer_addresses(&tx_buffer[1], clip, pitch);
167 	sharp_memory_set_tx_buffer_data(&tx_buffer[2], fb, clip, pitch, fmtcnv_state);
168 
169 	ret = sharp_memory_spi_write(smd->spi, tx_buffer, tx_buffer_size);
170 
171 	mutex_unlock(&smd->tx_mutex);
172 
173 	return ret;
174 }
175 
176 static int sharp_memory_maintain_display(struct sharp_memory_device *smd)
177 {
178 	int ret;
179 	u8 vcom = smd->vcom;
180 	u8 *tx_buffer = smd->tx_buffer;
181 
182 	mutex_lock(&smd->tx_mutex);
183 
184 	sharp_memory_set_tx_buffer_mode(&tx_buffer[0], SHARP_MEMORY_DISPLAY_MAINTAIN_MODE, vcom);
185 	tx_buffer[1] = 0; /* Write dummy data */
186 	ret = sharp_memory_spi_write(smd->spi, tx_buffer, 2);
187 
188 	mutex_unlock(&smd->tx_mutex);
189 
190 	return ret;
191 }
192 
193 static int sharp_memory_clear_display(struct sharp_memory_device *smd)
194 {
195 	int ret;
196 	u8 vcom = smd->vcom;
197 	u8 *tx_buffer = smd->tx_buffer;
198 
199 	mutex_lock(&smd->tx_mutex);
200 
201 	sharp_memory_set_tx_buffer_mode(&tx_buffer[0], SHARP_MEMORY_DISPLAY_CLEAR_MODE, vcom);
202 	tx_buffer[1] = 0; /* write dummy data */
203 	ret = sharp_memory_spi_write(smd->spi, tx_buffer, 2);
204 
205 	mutex_unlock(&smd->tx_mutex);
206 
207 	return ret;
208 }
209 
210 static void sharp_memory_fb_dirty(struct drm_framebuffer *fb, struct drm_rect *rect,
211 				  struct drm_format_conv_state *fmtconv_state)
212 {
213 	struct drm_rect clip;
214 	struct sharp_memory_device *smd = drm_to_sharp_memory_device(fb->dev);
215 
216 	/* Always update a full line regardless of what is dirty */
217 	clip.x1 = 0;
218 	clip.x2 = fb->width;
219 	clip.y1 = rect->y1;
220 	clip.y2 = rect->y2;
221 
222 	sharp_memory_update_display(smd, fb, clip, fmtconv_state);
223 }
224 
225 static int sharp_memory_plane_atomic_check(struct drm_plane *plane,
226 					   struct drm_atomic_state *state)
227 {
228 	struct drm_plane_state *plane_state = drm_atomic_get_new_plane_state(state, plane);
229 	struct sharp_memory_device *smd;
230 	struct drm_crtc_state *crtc_state;
231 
232 	smd = container_of(plane, struct sharp_memory_device, plane);
233 	crtc_state = drm_atomic_get_new_crtc_state(state, &smd->crtc);
234 
235 	return drm_atomic_helper_check_plane_state(plane_state, crtc_state,
236 						   DRM_PLANE_NO_SCALING,
237 						   DRM_PLANE_NO_SCALING,
238 						   false, false);
239 }
240 
/*
 * Commit hook: merge accumulated damage between old and new plane state
 * and, if any, push the dirty lines to the panel. Skips the flush when
 * the CRTC is inactive. The format-conversion state must be released in
 * all paths, hence the unconditional release at the end.
 */
static void sharp_memory_plane_atomic_update(struct drm_plane *plane,
					     struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state, plane);
	struct drm_plane_state *plane_state = plane->state;
	struct drm_format_conv_state fmtcnv_state = DRM_FORMAT_CONV_STATE_INIT;
	struct sharp_memory_device *smd;
	struct drm_rect rect;

	smd = container_of(plane, struct sharp_memory_device, plane);
	if (!smd->crtc.state->active)
		return;

	if (drm_atomic_helper_damage_merged(old_state, plane_state, &rect))
		sharp_memory_fb_dirty(plane_state->fb, &rect, &fmtcnv_state);

	drm_format_conv_state_release(&fmtcnv_state);
}
259 
/* Plane helpers: DMA fb prepare plus the check/update hooks above */
static const struct drm_plane_helper_funcs sharp_memory_plane_helper_funcs = {
	.prepare_fb = drm_gem_plane_helper_prepare_fb,
	.atomic_check = sharp_memory_plane_atomic_check,
	.atomic_update = sharp_memory_plane_atomic_update,
};
265 
/* Only the linear modifier is supported, regardless of format */
static bool sharp_memory_format_mod_supported(struct drm_plane *plane,
					      u32 format,
					      u64 modifier)
{
	return modifier == DRM_FORMAT_MOD_LINEAR;
}
272 
/* Plane vtable: stock atomic helpers throughout */
static const struct drm_plane_funcs sharp_memory_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = drm_plane_cleanup,
	.reset = drm_atomic_helper_plane_reset,
	.atomic_duplicate_state	= drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
	.format_mod_supported = sharp_memory_format_mod_supported,
};
282 
/* Only the panel's single fixed mode is valid */
static enum drm_mode_status sharp_memory_crtc_mode_valid(struct drm_crtc *crtc,
							 const struct drm_display_mode *mode)
{
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(crtc->dev);

	return drm_crtc_helper_mode_valid_fixed(crtc, mode, smd->mode);
}
290 
291 static int sharp_memory_crtc_check(struct drm_crtc *crtc,
292 				   struct drm_atomic_state *state)
293 {
294 	struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state, crtc);
295 	int ret;
296 
297 	if (!crtc_state->enable)
298 		goto out;
299 
300 	ret = drm_atomic_helper_check_crtc_primary_plane(crtc_state);
301 	if (ret)
302 		return ret;
303 
304 out:
305 	return drm_atomic_add_affected_planes(state, crtc);
306 }
307 
/*
 * Software VCOM thread: once a second, flip the VCOM level and send a
 * 'maintain display' command carrying it, until kthread_stop().
 *
 * NOTE(review): smd->vcom is toggled here without holding tx_mutex,
 * while the update path reads it concurrently; a 1-bit toggle is likely
 * benign but worth confirming.
 */
static int sharp_memory_sw_vcom_signal_thread(void *data)
{
	struct sharp_memory_device *smd = data;

	while (!kthread_should_stop()) {
		smd->vcom ^= 1; /* Toggle vcom */
		sharp_memory_maintain_display(smd);
		msleep(1000);
	}

	return 0;
}
320 
/* Enable: blank the panel to a known state, then assert the enable GPIO */
static void sharp_memory_crtc_enable(struct drm_crtc *crtc,
				     struct drm_atomic_state *state)
{
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(crtc->dev);

	sharp_memory_clear_display(smd);

	if (smd->enable_gpio)
		gpiod_set_value(smd->enable_gpio, 1);
}
331 
/* Disable: blank the panel, then deassert the enable GPIO */
static void sharp_memory_crtc_disable(struct drm_crtc *crtc,
				      struct drm_atomic_state *state)
{
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(crtc->dev);

	sharp_memory_clear_display(smd);

	if (smd->enable_gpio)
		gpiod_set_value(smd->enable_gpio, 0);
}
342 
/* CRTC helper hooks defined above */
static const struct drm_crtc_helper_funcs sharp_memory_crtc_helper_funcs = {
	.mode_valid = sharp_memory_crtc_mode_valid,
	.atomic_check = sharp_memory_crtc_check,
	.atomic_enable = sharp_memory_crtc_enable,
	.atomic_disable = sharp_memory_crtc_disable,
};

/* CRTC vtable: stock atomic helpers */
static const struct drm_crtc_funcs sharp_memory_crtc_funcs = {
	.reset = drm_atomic_helper_crtc_reset,
	.destroy = drm_crtc_cleanup,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
};

/* Dummy encoder; only cleanup is needed */
static const struct drm_encoder_funcs sharp_memory_encoder_funcs = {
	.destroy = drm_encoder_cleanup,
};
362 
/* Report the panel's single fixed mode */
static int sharp_memory_connector_get_modes(struct drm_connector *connector)
{
	struct sharp_memory_device *smd = drm_to_sharp_memory_device(connector->dev);

	return drm_connector_helper_get_modes_fixed(connector, smd->mode);
}
369 
static const struct drm_connector_helper_funcs sharp_memory_connector_hfuncs = {
	.get_modes = sharp_memory_connector_get_modes,
};

/* Connector vtable: stock atomic helpers */
static const struct drm_connector_funcs sharp_memory_connector_funcs = {
	.reset = drm_atomic_helper_connector_reset,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = drm_connector_cleanup,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,

};

/* Mode config: dirty-tracking fb creation plus stock atomic check/commit */
static const struct drm_mode_config_funcs sharp_memory_mode_config_funcs = {
	.fb_create = drm_gem_fb_create_with_dirty,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = drm_atomic_helper_commit,
};
388 
/*
 * Fixed panel modes: DRM_SIMPLE_MODE(hdisplay, vdisplay, width_mm, height_mm).
 * The LS027B7DH01A variant reuses the LS027B7DH01 mode (see the match
 * tables below).
 */
static const struct drm_display_mode sharp_memory_ls010b7dh04_mode = {
	DRM_SIMPLE_MODE(128, 128, 18, 18),
};

static const struct drm_display_mode sharp_memory_ls011b7dh03_mode = {
	DRM_SIMPLE_MODE(160, 68, 25, 10),
};

static const struct drm_display_mode sharp_memory_ls012b7dd01_mode = {
	DRM_SIMPLE_MODE(184, 38, 29, 6),
};

static const struct drm_display_mode sharp_memory_ls013b7dh03_mode = {
	DRM_SIMPLE_MODE(128, 128, 23, 23),
};

static const struct drm_display_mode sharp_memory_ls013b7dh05_mode = {
	DRM_SIMPLE_MODE(144, 168, 20, 24),
};

static const struct drm_display_mode sharp_memory_ls018b7dh02_mode = {
	DRM_SIMPLE_MODE(230, 303, 27, 36),
};

static const struct drm_display_mode sharp_memory_ls027b7dh01_mode = {
	DRM_SIMPLE_MODE(400, 240, 58, 35),
};

static const struct drm_display_mode sharp_memory_ls032b7dd02_mode = {
	DRM_SIMPLE_MODE(336, 536, 42, 68),
};

static const struct drm_display_mode sharp_memory_ls044q7dh01_mode = {
	DRM_SIMPLE_MODE(320, 240, 89, 67),
};
424 
/* SPI id table; driver_data points at the panel's fixed display mode */
static const struct spi_device_id sharp_memory_ids[] = {
	{"ls010b7dh04", (kernel_ulong_t)&sharp_memory_ls010b7dh04_mode},
	{"ls011b7dh03", (kernel_ulong_t)&sharp_memory_ls011b7dh03_mode},
	{"ls012b7dd01", (kernel_ulong_t)&sharp_memory_ls012b7dd01_mode},
	{"ls013b7dh03", (kernel_ulong_t)&sharp_memory_ls013b7dh03_mode},
	{"ls013b7dh05", (kernel_ulong_t)&sharp_memory_ls013b7dh05_mode},
	{"ls018b7dh02", (kernel_ulong_t)&sharp_memory_ls018b7dh02_mode},
	{"ls027b7dh01", (kernel_ulong_t)&sharp_memory_ls027b7dh01_mode},
	{"ls027b7dh01a", (kernel_ulong_t)&sharp_memory_ls027b7dh01_mode},
	{"ls032b7dd02", (kernel_ulong_t)&sharp_memory_ls032b7dd02_mode},
	{"ls044q7dh01", (kernel_ulong_t)&sharp_memory_ls044q7dh01_mode},
	{},
};
MODULE_DEVICE_TABLE(spi, sharp_memory_ids);
439 
440 static const struct of_device_id sharp_memory_of_match[] = {
441 	{.compatible = "sharp,ls010b7dh04", &sharp_memory_ls010b7dh04_mode},
442 	{.compatible = "sharp,ls011b7dh03", &sharp_memory_ls011b7dh03_mode},
443 	{.compatible = "sharp,ls012b7dd01", &sharp_memory_ls012b7dd01_mode},
444 	{.compatible = "sharp,ls013b7dh03", &sharp_memory_ls013b7dh03_mode},
445 	{.compatible = "sharp,ls013b7dh05", &sharp_memory_ls013b7dh05_mode},
446 	{.compatible = "sharp,ls018b7dh02", &sharp_memory_ls018b7dh02_mode},
447 	{.compatible = "sharp,ls027b7dh01", &sharp_memory_ls027b7dh01_mode},
448 	{.compatible = "sharp,ls027b7dh01a", &sharp_memory_ls027b7dh01_mode},
449 	{.compatible = "sharp,ls032b7dd02", &sharp_memory_ls032b7dd02_mode},
450 	{.compatible = "sharp,ls044q7dh01", &sharp_memory_ls044q7dh01_mode},
451 	{},
452 };
453 MODULE_DEVICE_TABLE(of, sharp_memory_of_match);
454 
/* Sole supported pixel format; converted to mono at flush time */
static const u32 sharp_memory_formats[] = {
	DRM_FORMAT_XRGB8888,
};
458 
459 static int sharp_memory_pipe_init(struct drm_device *dev,
460 				  struct sharp_memory_device *smd,
461 				  const u32 *formats, unsigned int format_count,
462 				  const u64 *format_modifiers)
463 {
464 	int ret;
465 	struct drm_encoder *encoder = &smd->encoder;
466 	struct drm_plane *plane = &smd->plane;
467 	struct drm_crtc *crtc = &smd->crtc;
468 	struct drm_connector *connector = &smd->connector;
469 
470 	drm_plane_helper_add(plane, &sharp_memory_plane_helper_funcs);
471 	ret = drm_universal_plane_init(dev, plane, 0,
472 				       &sharp_memory_plane_funcs,
473 				       formats, format_count,
474 				       format_modifiers,
475 				       DRM_PLANE_TYPE_PRIMARY, NULL);
476 	if (ret)
477 		return ret;
478 
479 	drm_crtc_helper_add(crtc, &sharp_memory_crtc_helper_funcs);
480 	ret = drm_crtc_init_with_planes(dev, crtc, plane, NULL,
481 					&sharp_memory_crtc_funcs, NULL);
482 	if (ret)
483 		return ret;
484 
485 	encoder->possible_crtcs = drm_crtc_mask(crtc);
486 	ret = drm_encoder_init(dev, encoder, &sharp_memory_encoder_funcs,
487 			       DRM_MODE_ENCODER_NONE, NULL);
488 	if (ret)
489 		return ret;
490 
491 	ret = drm_connector_init(&smd->drm, &smd->connector,
492 				 &sharp_memory_connector_funcs,
493 				 DRM_MODE_CONNECTOR_SPI);
494 	if (ret)
495 		return ret;
496 
497 	drm_connector_helper_add(&smd->connector,
498 				 &sharp_memory_connector_hfuncs);
499 
500 	return drm_connector_attach_encoder(connector, encoder);
501 }
502 
503 static int sharp_memory_init_pwm_vcom_signal(struct sharp_memory_device *smd)
504 {
505 	int ret;
506 	struct device *dev = &smd->spi->dev;
507 	struct pwm_state pwm_state;
508 
509 	smd->pwm_vcom_signal = devm_pwm_get(dev, NULL);
510 	if (IS_ERR(smd->pwm_vcom_signal))
511 		return dev_err_probe(dev, PTR_ERR(smd->pwm_vcom_signal),
512 				     "Could not get pwm device\n");
513 
514 	pwm_init_state(smd->pwm_vcom_signal, &pwm_state);
515 	pwm_set_relative_duty_cycle(&pwm_state, 1, 10);
516 	pwm_state.enabled = true;
517 	ret = pwm_apply_might_sleep(smd->pwm_vcom_signal, &pwm_state);
518 	if (ret)
519 		return dev_err_probe(dev, -EINVAL, "Could not apply pwm state\n");
520 
521 	return 0;
522 }
523 
524 static int sharp_memory_probe(struct spi_device *spi)
525 {
526 	int ret;
527 	struct device *dev;
528 	struct sharp_memory_device *smd;
529 	struct drm_device *drm;
530 	const char *vcom_mode_str;
531 
532 	dev = &spi->dev;
533 
534 	ret = spi_setup(spi);
535 	if (ret < 0)
536 		return dev_err_probe(dev, ret, "Failed to setup spi device\n");
537 
538 	if (!dev->coherent_dma_mask) {
539 		ret = dma_coerce_mask_and_coherent(dev, DMA_BIT_MASK(32));
540 		if (ret)
541 			return dev_err_probe(dev, ret, "Failed to set dma mask\n");
542 	}
543 
544 	smd = devm_drm_dev_alloc(dev, &sharp_memory_drm_driver,
545 				 struct sharp_memory_device, drm);
546 	if (!smd)
547 		return -ENOMEM;
548 
549 	spi_set_drvdata(spi, smd);
550 
551 	smd->spi = spi;
552 	drm = &smd->drm;
553 	ret = drmm_mode_config_init(drm);
554 	if (ret)
555 		return dev_err_probe(dev, ret, "Failed to initialize drm config\n");
556 
557 	smd->enable_gpio = devm_gpiod_get_optional(dev, "enable", GPIOD_OUT_HIGH);
558 	if (!smd->enable_gpio)
559 		dev_warn(dev, "Enable gpio not defined\n");
560 
561 	drm->mode_config.funcs = &sharp_memory_mode_config_funcs;
562 	smd->mode = spi_get_device_match_data(spi);
563 
564 	smd->pitch = (SHARP_ADDR_PERIOD + smd->mode->hdisplay + SHARP_DUMMY_PERIOD) / 8;
565 	smd->tx_buffer_size = (SHARP_MODE_PERIOD +
566 			       (SHARP_ADDR_PERIOD + (smd->mode->hdisplay) + SHARP_DUMMY_PERIOD) *
567 			       smd->mode->vdisplay) / 8;
568 
569 	smd->tx_buffer = devm_kzalloc(dev, smd->tx_buffer_size, GFP_KERNEL);
570 	if (!smd->tx_buffer)
571 		return -ENOMEM;
572 
573 	mutex_init(&smd->tx_mutex);
574 
575 	/*
576 	 * VCOM is a signal that prevents DC bias from being built up in
577 	 * the panel resulting in pixels being forever stuck in one state.
578 	 *
579 	 * This driver supports three different methods to generate this
580 	 * signal depending on EXTMODE pin:
581 	 *
582 	 * software (EXTMODE = L) - This mode uses a kthread to
583 	 * periodically send a "maintain display" message to the display,
584 	 * toggling the vcom bit on and off with each message
585 	 *
586 	 * external (EXTMODE = H) - This mode relies on an external
587 	 * clock to generate the signal on the EXTCOMM pin
588 	 *
589 	 * pwm (EXTMODE = H) - This mode uses a pwm device to generate
590 	 * the signal on the EXTCOMM pin
591 	 *
592 	 */
593 	if (device_property_read_string(dev, "sharp,vcom-mode", &vcom_mode_str))
594 		return dev_err_probe(dev, -EINVAL,
595 				     "Unable to find sharp,vcom-mode node in device tree\n");
596 
597 	if (!strcmp("software", vcom_mode_str)) {
598 		smd->vcom_mode = SHARP_MEMORY_SOFTWARE_VCOM;
599 		smd->sw_vcom_signal = kthread_run(sharp_memory_sw_vcom_signal_thread,
600 						  smd, "sw_vcom_signal");
601 
602 	} else if (!strcmp("external", vcom_mode_str)) {
603 		smd->vcom_mode = SHARP_MEMORY_EXTERNAL_VCOM;
604 
605 	} else if (!strcmp("pwm", vcom_mode_str)) {
606 		smd->vcom_mode = SHARP_MEMORY_PWM_VCOM;
607 		ret = sharp_memory_init_pwm_vcom_signal(smd);
608 		if (ret)
609 			return ret;
610 	} else {
611 		return dev_err_probe(dev, -EINVAL, "Invalid value set for vcom-mode\n");
612 	}
613 
614 	drm->mode_config.min_width = smd->mode->hdisplay;
615 	drm->mode_config.max_width = smd->mode->hdisplay;
616 	drm->mode_config.min_height = smd->mode->vdisplay;
617 	drm->mode_config.max_height = smd->mode->vdisplay;
618 
619 	ret = sharp_memory_pipe_init(drm, smd, sharp_memory_formats,
620 				     ARRAY_SIZE(sharp_memory_formats),
621 				     NULL);
622 	if (ret)
623 		return dev_err_probe(dev, ret, "Failed to initialize display pipeline.\n");
624 
625 	drm_plane_enable_fb_damage_clips(&smd->plane);
626 	drm_mode_config_reset(drm);
627 
628 	ret = drm_dev_register(drm, 0);
629 	if (ret)
630 		return dev_err_probe(dev, ret, "Failed to register drm device.\n");
631 
632 	drm_client_setup(drm, NULL);
633 
634 	return 0;
635 }
636 
/*
 * Remove: unplug and shut down the DRM device first so no new SPI
 * traffic is generated, then stop whichever VCOM source is running.
 */
static void sharp_memory_remove(struct spi_device *spi)
{
	struct sharp_memory_device *smd = spi_get_drvdata(spi);

	drm_dev_unplug(&smd->drm);
	drm_atomic_helper_shutdown(&smd->drm);

	switch (smd->vcom_mode) {
	case SHARP_MEMORY_SOFTWARE_VCOM:
		kthread_stop(smd->sw_vcom_signal);
		break;

	case SHARP_MEMORY_EXTERNAL_VCOM:
		/* Externally generated; nothing to tear down */
		break;

	case SHARP_MEMORY_PWM_VCOM:
		pwm_disable(smd->pwm_vcom_signal);
		break;
	}
}
657 
/* SPI driver registration and module metadata */
static struct spi_driver sharp_memory_spi_driver = {
	.driver = {
		.name = "sharp_memory",
		.of_match_table = sharp_memory_of_match,
	},
	.probe = sharp_memory_probe,
	.remove = sharp_memory_remove,
	.id_table = sharp_memory_ids,
};
module_spi_driver(sharp_memory_spi_driver);

MODULE_AUTHOR("Alex Lanzano <lanzano.alex@gmail.com>");
MODULE_DESCRIPTION("SPI Protocol driver for the sharp_memory display");
MODULE_LICENSE("GPL");
672