1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * STM32 DMA2D - 2D Graphics Accelerator Driver
4 *
5 * Copyright (c) 2021 Dillon Min
6 * Dillon Min, <dillon.minfei@gmail.com>
7 *
8 * based on s5p-g2d
9 *
10 * Copyright (c) 2011 Samsung Electronics Co., Ltd.
11 * Kamil Debski, <k.debski@samsung.com>
12 */
13
14 #include <linux/module.h>
15 #include <linux/fs.h>
16 #include <linux/timer.h>
17 #include <linux/sched.h>
18 #include <linux/slab.h>
19 #include <linux/clk.h>
20 #include <linux/interrupt.h>
21 #include <linux/of.h>
22
23 #include <linux/platform_device.h>
24 #include <media/v4l2-mem2mem.h>
25 #include <media/v4l2-device.h>
26 #include <media/v4l2-ioctl.h>
27 #include <media/v4l2-event.h>
28 #include <media/videobuf2-v4l2.h>
29 #include <media/videobuf2-dma-contig.h>
30
31 #include "dma2d.h"
32 #include "dma2d-regs.h"
33
34 /*
35 * This V4L2 subdev m2m driver enables Chrom-Art Accelerator unit
36 * of STMicroelectronics STM32 SoC series.
37 *
38 * Currently support r2m, m2m, m2m_pfc.
39 *
40 * - r2m, Filling a part or the whole of a destination image with a specific
41 * color.
42 * - m2m, Copying a part or the whole of a source image into a part or the
43 * whole of a destination.
44 * - m2m_pfc, Copying a part or the whole of a source image into a part or the
45 * whole of a destination image with a pixel format conversion.
46 */
47
file2ctx(struct file * filp)48 static inline struct dma2d_ctx *file2ctx(struct file *filp)
49 {
50 return container_of(file_to_v4l2_fh(filp), struct dma2d_ctx, fh);
51 }
52
/*
 * Pixel formats handled by the driver: each V4L2 fourcc is paired with
 * the DMA2D hardware color mode it maps to and its depth in bits/pixel.
 */
static const struct dma2d_fmt formats[] = {
	{
		.fourcc	= V4L2_PIX_FMT_ARGB32,
		.cmode = DMA2D_CMODE_ARGB8888,
		.depth = 32,
	},
	{
		.fourcc	= V4L2_PIX_FMT_RGB24,
		.cmode = DMA2D_CMODE_RGB888,
		.depth = 24,
	},
	{
		.fourcc	= V4L2_PIX_FMT_RGB565,
		.cmode = DMA2D_CMODE_RGB565,
		.depth = 16,
	},
	{
		.fourcc	= V4L2_PIX_FMT_ARGB555,
		.cmode = DMA2D_CMODE_ARGB1555,
		.depth = 16,
	},
	{
		.fourcc	= V4L2_PIX_FMT_ARGB444,
		.cmode = DMA2D_CMODE_ARGB4444,
		.depth = 16,
	},
};
80
81 #define NUM_FORMATS ARRAY_SIZE(formats)
82
/*
 * Frame defaults copied into every new context (see dma2d_open()):
 * first table format (ARGB8888), DEFAULT_WIDTH x DEFAULT_HEIGHT.
 * a_rgb[] is laid out {B, G, R, A} (see dma2d_s_ctrl()), so this sets
 * opaque black.
 */
static const struct dma2d_frame def_frame = {
	.width		= DEFAULT_WIDTH,
	.height		= DEFAULT_HEIGHT,
	.line_offset	= 0,
	.a_rgb		= {0x00, 0x00, 0x00, 0xff},
	.a_mode		= DMA2D_ALPHA_MODE_NO_MODIF,
	.fmt		= (struct dma2d_fmt *)&formats[0],
	.size		= DEFAULT_SIZE,
};
92
find_fmt(int pixelformat)93 static struct dma2d_fmt *find_fmt(int pixelformat)
94 {
95 unsigned int i;
96
97 for (i = 0; i < NUM_FORMATS; i++) {
98 if (formats[i].fourcc == pixelformat)
99 return (struct dma2d_fmt *)&formats[i];
100 }
101
102 return NULL;
103 }
104
get_frame(struct dma2d_ctx * ctx,enum v4l2_buf_type type)105 static struct dma2d_frame *get_frame(struct dma2d_ctx *ctx,
106 enum v4l2_buf_type type)
107 {
108 return V4L2_TYPE_IS_OUTPUT(type) ? &ctx->cap : &ctx->out;
109 }
110
dma2d_queue_setup(struct vb2_queue * vq,unsigned int * nbuffers,unsigned int * nplanes,unsigned int sizes[],struct device * alloc_devs[])111 static int dma2d_queue_setup(struct vb2_queue *vq,
112 unsigned int *nbuffers, unsigned int *nplanes,
113 unsigned int sizes[], struct device *alloc_devs[])
114 {
115 struct dma2d_ctx *ctx = vb2_get_drv_priv(vq);
116 struct dma2d_frame *f = get_frame(ctx, vq->type);
117
118 if (*nplanes)
119 return sizes[0] < f->size ? -EINVAL : 0;
120
121 sizes[0] = f->size;
122 *nplanes = 1;
123
124 return 0;
125 }
126
dma2d_buf_out_validate(struct vb2_buffer * vb)127 static int dma2d_buf_out_validate(struct vb2_buffer *vb)
128 {
129 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
130
131 if (vbuf->field == V4L2_FIELD_ANY)
132 vbuf->field = V4L2_FIELD_NONE;
133 if (vbuf->field != V4L2_FIELD_NONE)
134 return -EINVAL;
135
136 return 0;
137 }
138
dma2d_buf_prepare(struct vb2_buffer * vb)139 static int dma2d_buf_prepare(struct vb2_buffer *vb)
140 {
141 struct dma2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
142 struct dma2d_frame *f = get_frame(ctx, vb->vb2_queue->type);
143
144 if (vb2_plane_size(vb, 0) < f->size)
145 return -EINVAL;
146
147 vb2_set_plane_payload(vb, 0, f->size);
148
149 return 0;
150 }
151
dma2d_buf_queue(struct vb2_buffer * vb)152 static void dma2d_buf_queue(struct vb2_buffer *vb)
153 {
154 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
155 struct dma2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
156
157 v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
158 }
159
dma2d_start_streaming(struct vb2_queue * q,unsigned int count)160 static int dma2d_start_streaming(struct vb2_queue *q, unsigned int count)
161 {
162 struct dma2d_ctx *ctx = vb2_get_drv_priv(q);
163 struct dma2d_frame *f = get_frame(ctx, q->type);
164
165 f->sequence = 0;
166 return 0;
167 }
168
dma2d_stop_streaming(struct vb2_queue * q)169 static void dma2d_stop_streaming(struct vb2_queue *q)
170 {
171 struct dma2d_ctx *ctx = vb2_get_drv_priv(q);
172 struct vb2_v4l2_buffer *vbuf;
173
174 for (;;) {
175 if (V4L2_TYPE_IS_OUTPUT(q->type))
176 vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
177 else
178 vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
179 if (!vbuf)
180 return;
181 v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
182 }
183 }
184
/* vb2 callbacks shared by the OUTPUT and CAPTURE queues. */
static const struct vb2_ops dma2d_qops = {
	.queue_setup	= dma2d_queue_setup,
	.buf_out_validate	 = dma2d_buf_out_validate,
	.buf_prepare	= dma2d_buf_prepare,
	.buf_queue	= dma2d_buf_queue,
	.start_streaming = dma2d_start_streaming,
	.stop_streaming  = dma2d_stop_streaming,
};
193
queue_init(void * priv,struct vb2_queue * src_vq,struct vb2_queue * dst_vq)194 static int queue_init(void *priv, struct vb2_queue *src_vq,
195 struct vb2_queue *dst_vq)
196 {
197 struct dma2d_ctx *ctx = priv;
198 int ret;
199
200 src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
201 src_vq->io_modes = VB2_MMAP | VB2_DMABUF;
202 src_vq->drv_priv = ctx;
203 src_vq->ops = &dma2d_qops;
204 src_vq->mem_ops = &vb2_dma_contig_memops;
205 src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
206 src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
207 src_vq->lock = &ctx->dev->mutex;
208 src_vq->dev = ctx->dev->v4l2_dev.dev;
209
210 ret = vb2_queue_init(src_vq);
211 if (ret)
212 return ret;
213
214 dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
215 dst_vq->io_modes = VB2_MMAP | VB2_DMABUF;
216 dst_vq->drv_priv = ctx;
217 dst_vq->ops = &dma2d_qops;
218 dst_vq->mem_ops = &vb2_dma_contig_memops;
219 dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
220 dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
221 dst_vq->lock = &ctx->dev->mutex;
222 dst_vq->dev = ctx->dev->v4l2_dev.dev;
223
224 return vb2_queue_init(dst_vq);
225 }
226
dma2d_s_ctrl(struct v4l2_ctrl * ctrl)227 static int dma2d_s_ctrl(struct v4l2_ctrl *ctrl)
228 {
229 struct dma2d_frame *frm;
230 struct dma2d_ctx *ctx = container_of(ctrl->handler, struct dma2d_ctx,
231 ctrl_handler);
232 unsigned long flags;
233
234 spin_lock_irqsave(&ctx->dev->ctrl_lock, flags);
235 switch (ctrl->id) {
236 case V4L2_CID_COLORFX:
237 if (ctrl->val == V4L2_COLORFX_SET_RGB)
238 ctx->op_mode = DMA2D_MODE_R2M;
239 else if (ctrl->val == V4L2_COLORFX_NONE)
240 ctx->op_mode = DMA2D_MODE_M2M;
241 break;
242 case V4L2_CID_COLORFX_RGB:
243 frm = get_frame(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
244 frm->a_rgb[2] = (ctrl->val >> 16) & 0xff;
245 frm->a_rgb[1] = (ctrl->val >> 8) & 0xff;
246 frm->a_rgb[0] = (ctrl->val >> 0) & 0xff;
247 break;
248 default:
249 spin_unlock_irqrestore(&ctx->dev->ctrl_lock, flags);
250 return -EINVAL;
251 }
252 spin_unlock_irqrestore(&ctx->dev->ctrl_lock, flags);
253
254 return 0;
255 }
256
/* Control ops: only s_ctrl is needed, both controls are write-through. */
static const struct v4l2_ctrl_ops dma2d_ctrl_ops = {
	.s_ctrl	= dma2d_s_ctrl,
};
260
dma2d_setup_ctrls(struct dma2d_ctx * ctx)261 static int dma2d_setup_ctrls(struct dma2d_ctx *ctx)
262 {
263 struct v4l2_ctrl_handler *handler = &ctx->ctrl_handler;
264
265 v4l2_ctrl_handler_init(handler, 2);
266
267 v4l2_ctrl_new_std_menu(handler, &dma2d_ctrl_ops, V4L2_CID_COLORFX,
268 V4L2_COLORFX_SET_RGB, ~0x10001,
269 V4L2_COLORFX_NONE);
270
271 v4l2_ctrl_new_std(handler, &dma2d_ctrl_ops, V4L2_CID_COLORFX_RGB, 0,
272 0xffffff, 1, 0);
273
274 return 0;
275 }
276
/*
 * dma2d_open() - allocate and initialize a per-open driver context
 *
 * Seeds both frames with the defaults (ARGB8888, DEFAULT_WIDTH x
 * DEFAULT_HEIGHT), selects M2M-with-PFC as the initial operation mode,
 * creates the m2m context and registers the controls.
 *
 * Return: 0 on success, -ENOMEM on allocation failure, -ERESTARTSYS if
 * interrupted while taking dev->mutex, or the v4l2_m2m_ctx_init() error.
 */
static int dma2d_open(struct file *file)
{
	struct dma2d_dev *dev = video_drvdata(file);
	struct dma2d_ctx *ctx = NULL;
	int ret = 0;

	ctx = kzalloc_obj(*ctx);
	if (!ctx)
		return -ENOMEM;
	ctx->dev = dev;
	/* Set default formats */
	ctx->cap = def_frame;
	ctx->bg = def_frame;
	ctx->out = def_frame;
	ctx->op_mode = DMA2D_MODE_M2M_FPC;
	ctx->colorspace = V4L2_COLORSPACE_REC709;
	/* dev->mutex serializes m2m context creation against release(). */
	if (mutex_lock_interruptible(&dev->mutex)) {
		kfree(ctx);
		return -ERESTARTSYS;
	}

	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx, &queue_init);
	if (IS_ERR(ctx->fh.m2m_ctx)) {
		ret = PTR_ERR(ctx->fh.m2m_ctx);
		mutex_unlock(&dev->mutex);
		kfree(ctx);
		return ret;
	}

	v4l2_fh_init(&ctx->fh, video_devdata(file));
	v4l2_fh_add(&ctx->fh, file);

	/* NOTE(review): return value ignored — a failed control
	 * registration would go unnoticed here; confirm intended. */
	dma2d_setup_ctrls(ctx);

	/* Write the default values to the ctx struct */
	v4l2_ctrl_handler_setup(&ctx->ctrl_handler);

	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
	mutex_unlock(&dev->mutex);

	return 0;
}
319
/*
 * dma2d_release() - tear down a per-open context
 *
 * The m2m context release (which may wait for a running job) is done
 * under dev->mutex; fh/ctrl-handler teardown and the final kfree mirror
 * dma2d_open() in reverse.
 */
static int dma2d_release(struct file *file)
{
	struct dma2d_dev *dev = video_drvdata(file);
	struct dma2d_ctx *ctx = file2ctx(file);

	mutex_lock(&dev->mutex);
	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
	mutex_unlock(&dev->mutex);
	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
	v4l2_fh_del(&ctx->fh, file);
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);

	return 0;
}
335
vidioc_querycap(struct file * file,void * priv,struct v4l2_capability * cap)336 static int vidioc_querycap(struct file *file, void *priv,
337 struct v4l2_capability *cap)
338 {
339 strscpy(cap->driver, DMA2D_NAME, sizeof(cap->driver));
340 strscpy(cap->card, DMA2D_NAME, sizeof(cap->card));
341 strscpy(cap->bus_info, BUS_INFO, sizeof(cap->bus_info));
342
343 return 0;
344 }
345
vidioc_enum_fmt(struct file * file,void * priv,struct v4l2_fmtdesc * f)346 static int vidioc_enum_fmt(struct file *file, void *priv, struct v4l2_fmtdesc *f)
347 {
348 if (f->index >= NUM_FORMATS)
349 return -EINVAL;
350
351 f->pixelformat = formats[f->index].fourcc;
352 return 0;
353 }
354
/*
 * vidioc_g_fmt() - report the currently configured format for the
 * frame matching f->type; colorimetry always comes from the context
 * (set on the OUTPUT side by vidioc_s_fmt()).
 */
static int vidioc_g_fmt(struct file *file, void *priv, struct v4l2_format *f)
{
	struct dma2d_ctx *ctx = file2ctx(file);
	struct dma2d_frame *frm;

	frm = get_frame(ctx, f->type);
	f->fmt.pix.width = frm->width;
	f->fmt.pix.height = frm->height;
	f->fmt.pix.field = V4L2_FIELD_NONE;
	f->fmt.pix.pixelformat = frm->fmt->fourcc;
	/* depth is in bits per pixel; >> 3 converts to bytes per line */
	f->fmt.pix.bytesperline = (frm->width * frm->fmt->depth) >> 3;
	f->fmt.pix.sizeimage = frm->size;
	f->fmt.pix.colorspace = ctx->colorspace;
	f->fmt.pix.xfer_func = ctx->xfer_func;
	f->fmt.pix.ycbcr_enc = ctx->ycbcr_enc;
	f->fmt.pix.quantization = ctx->quant;

	return 0;
}
374
/*
 * vidioc_try_fmt() - adjust a requested format to hardware limits
 *
 * Unknown fourccs fall back to the first table entry; width/height are
 * clamped to [1, MAX_WIDTH/HEIGHT]; only progressive fields are
 * accepted.  CAPTURE colorimetry is forced to match the context (the
 * hardware does no colorspace conversion); an unset OUTPUT colorspace
 * defaults to REC709.
 */
static int vidioc_try_fmt(struct file *file, void *priv, struct v4l2_format *f)
{
	struct dma2d_ctx *ctx = file2ctx(file);
	struct dma2d_fmt *fmt;
	enum v4l2_field *field;
	u32 fourcc = f->fmt.pix.pixelformat;

	fmt = find_fmt(fourcc);
	if (!fmt) {
		/* Unsupported fourcc: fall back to the default format. */
		f->fmt.pix.pixelformat = formats[0].fourcc;
		fmt = find_fmt(f->fmt.pix.pixelformat);
	}

	field = &f->fmt.pix.field;
	if (*field == V4L2_FIELD_ANY)
		*field = V4L2_FIELD_NONE;
	else if (*field != V4L2_FIELD_NONE)
		return -EINVAL;

	if (f->fmt.pix.width > MAX_WIDTH)
		f->fmt.pix.width = MAX_WIDTH;
	if (f->fmt.pix.height > MAX_HEIGHT)
		f->fmt.pix.height = MAX_HEIGHT;

	if (f->fmt.pix.width < 1)
		f->fmt.pix.width = 1;
	if (f->fmt.pix.height < 1)
		f->fmt.pix.height = 1;

	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT && !f->fmt.pix.colorspace) {
		f->fmt.pix.colorspace = V4L2_COLORSPACE_REC709;
	} else if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		f->fmt.pix.colorspace = ctx->colorspace;
		f->fmt.pix.xfer_func = ctx->xfer_func;
		f->fmt.pix.ycbcr_enc = ctx->ycbcr_enc;
		f->fmt.pix.quantization = ctx->quant;
	}
	/* depth is in bits per pixel; >> 3 converts to bytes per line */
	f->fmt.pix.bytesperline = (f->fmt.pix.width * fmt->depth) >> 3;
	f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;

	return 0;
}
417
/*
 * vidioc_s_fmt() - set the active format on the frame matching f->type
 *
 * Values are first normalized via vidioc_try_fmt(); the change is
 * rejected with -EBUSY while the queue has buffers allocated.  Setting
 * the OUTPUT format also latches the context colorimetry that CAPTURE
 * g_fmt/try_fmt report back.
 */
static int vidioc_s_fmt(struct file *file, void *priv, struct v4l2_format *f)
{
	struct dma2d_ctx *ctx = file2ctx(file);
	struct vb2_queue *vq;
	struct dma2d_frame *frm;
	struct dma2d_fmt *fmt;
	int ret = 0;

	/* Adjust all values accordingly to the hardware capabilities
	 * and chosen format.
	 */
	ret = vidioc_try_fmt(file, priv, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (vb2_is_busy(vq))
		return -EBUSY;

	/* try_fmt already forced a supported fourcc, so this cannot fail. */
	fmt = find_fmt(f->fmt.pix.pixelformat);
	if (!fmt)
		return -EINVAL;

	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
		ctx->colorspace = f->fmt.pix.colorspace;
		ctx->xfer_func = f->fmt.pix.xfer_func;
		ctx->ycbcr_enc = f->fmt.pix.ycbcr_enc;
		ctx->quant = f->fmt.pix.quantization;
	}

	frm = get_frame(ctx, f->type);
	frm->width = f->fmt.pix.width;
	frm->height = f->fmt.pix.height;
	frm->size = f->fmt.pix.sizeimage;
	/* Reset crop settings */
	frm->o_width = 0;
	frm->o_height = 0;
	frm->c_width = frm->width;
	frm->c_height = frm->height;
	frm->right = frm->width;
	frm->bottom = frm->height;
	frm->fmt = fmt;
	frm->line_offset = 0;

	return 0;
}
464
/*
 * device_run() - m2m job: program the DMA2D and kick one transfer
 *
 * Runs with ctrl_lock held (irqsave) so register programming does not
 * race the completion ISR or s_ctrl.  Picks the next src/dst buffers,
 * assigns sequence numbers, copies timestamps, gates the clock on and
 * configures foreground, output and common registers before starting
 * the engine.  Completion is signalled from dma2d_isr().
 */
static void device_run(void *prv)
{
	struct dma2d_ctx *ctx = prv;
	struct dma2d_dev *dev = ctx->dev;
	struct dma2d_frame *frm_out, *frm_cap;
	struct vb2_v4l2_buffer *src, *dst;
	unsigned long flags;

	spin_lock_irqsave(&dev->ctrl_lock, flags);
	dev->curr = ctx;

	src = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
	dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
	/* NOTE(review): these early-exit paths never call
	 * v4l2_m2m_job_finish(), which can stall the m2m queue —
	 * confirm this mirrors intended (upstream) behavior. */
	if (!dst || !src)
		goto end;

	frm_cap = get_frame(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
	frm_out = get_frame(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
	if (!frm_cap || !frm_out)
		goto end;

	src->sequence = frm_out->sequence++;
	dst->sequence = frm_cap->sequence++;
	v4l2_m2m_buf_copy_metadata(src, dst);

	/* Clock is gated back off in the ISR on transfer complete. */
	if (clk_enable(dev->gate))
		goto end;

	dma2d_config_fg(dev, frm_out,
			vb2_dma_contig_plane_dma_addr(&src->vb2_buf, 0));

	/* TODO: add M2M_BLEND handler here */

	/* R2M (fill) is kept as-is; otherwise choose plain copy or
	 * copy-with-pixel-format-conversion depending on whether the
	 * source and destination fourccs match. */
	if (ctx->op_mode != DMA2D_MODE_R2M) {
		if (frm_out->fmt->fourcc == frm_cap->fmt->fourcc)
			ctx->op_mode = DMA2D_MODE_M2M;
		else
			ctx->op_mode = DMA2D_MODE_M2M_FPC;
	}

	dma2d_config_out(dev, frm_cap,
			 vb2_dma_contig_plane_dma_addr(&dst->vb2_buf, 0));
	dma2d_config_common(dev, ctx->op_mode, frm_cap->width, frm_cap->height);

	dma2d_start(dev);
end:
	spin_unlock_irqrestore(&dev->ctrl_lock, flags);
}
513
/*
 * dma2d_isr() - transfer-complete interrupt handler
 *
 * On transfer complete (ISR_TCIF set, or a zero status) the clock is
 * gated off, both buffers are completed as DONE and the m2m job is
 * finished so the next job can be scheduled.
 */
static irqreturn_t dma2d_isr(int irq, void *prv)
{
	struct dma2d_dev *dev = prv;
	struct dma2d_ctx *ctx = dev->curr;
	struct vb2_v4l2_buffer *src, *dst;
	u32 s = dma2d_get_int(dev);

	dma2d_clear_int(dev);
	if (s & ISR_TCIF || s == 0) {
		clk_disable(dev->gate);

		/* NOTE(review): ctx is dereferenced below even if this
		 * WARN fires — a spurious IRQ with dev->curr == NULL
		 * would oops; confirm that cannot happen here. */
		WARN_ON(!ctx);

		src = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
		dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);

		WARN_ON(!dst);
		WARN_ON(!src);

		v4l2_m2m_buf_done(src, VB2_BUF_STATE_DONE);
		v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE);
		v4l2_m2m_job_finish(dev->m2m_dev, ctx->fh.m2m_ctx);

		dev->curr = NULL;
	}

	return IRQ_HANDLED;
}
542
/* File operations; poll/mmap are delegated to the m2m framework. */
static const struct v4l2_file_operations dma2d_fops = {
	.owner		= THIS_MODULE,
	.open		= dma2d_open,
	.release	= dma2d_release,
	.poll		= v4l2_m2m_fop_poll,
	.unlocked_ioctl	= video_ioctl2,
	.mmap		= v4l2_m2m_fop_mmap,
#ifndef CONFIG_MMU
	.get_unmapped_area = v4l2_m2m_get_unmapped_area,
#endif
};
554
/*
 * ioctl table: format handlers are shared between CAPTURE and OUTPUT
 * (they dispatch on f->type); buffer/stream ioctls use the generic m2m
 * helpers.
 */
static const struct v4l2_ioctl_ops dma2d_ioctl_ops = {
	.vidioc_querycap	= vidioc_querycap,

	.vidioc_enum_fmt_vid_cap = vidioc_enum_fmt,
	.vidioc_g_fmt_vid_cap	= vidioc_g_fmt,
	.vidioc_try_fmt_vid_cap	= vidioc_try_fmt,
	.vidioc_s_fmt_vid_cap	= vidioc_s_fmt,

	.vidioc_enum_fmt_vid_out = vidioc_enum_fmt,
	.vidioc_g_fmt_vid_out	= vidioc_g_fmt,
	.vidioc_try_fmt_vid_out	= vidioc_try_fmt,
	.vidioc_s_fmt_vid_out	= vidioc_s_fmt,

	.vidioc_reqbufs		= v4l2_m2m_ioctl_reqbufs,
	.vidioc_querybuf	= v4l2_m2m_ioctl_querybuf,
	.vidioc_qbuf		= v4l2_m2m_ioctl_qbuf,
	.vidioc_dqbuf		= v4l2_m2m_ioctl_dqbuf,
	.vidioc_prepare_buf	= v4l2_m2m_ioctl_prepare_buf,
	.vidioc_create_bufs	= v4l2_m2m_ioctl_create_bufs,
	.vidioc_expbuf		= v4l2_m2m_ioctl_expbuf,

	.vidioc_streamon	= v4l2_m2m_ioctl_streamon,
	.vidioc_streamoff	= v4l2_m2m_ioctl_streamoff,

	.vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
	.vidioc_unsubscribe_event = v4l2_event_unsubscribe,
};
582
/* Template copied into the dynamically allocated vfd in probe(). */
static const struct video_device dma2d_videodev = {
	.name		= DMA2D_NAME,
	.fops		= &dma2d_fops,
	.ioctl_ops	= &dma2d_ioctl_ops,
	.minor		= -1,
	.release	= video_device_release,
	.vfl_dir	= VFL_DIR_M2M,
};
591
/* m2m ops: only device_run; job completion is driven from the ISR. */
static const struct v4l2_m2m_ops dma2d_m2m_ops = {
	.device_run	= device_run,
};
595
/* Forward declaration; the OF match table is defined after dma2d_remove().
 * NOTE(review): nothing between here and the definition references it —
 * possibly a leftover that could be dropped. */
static const struct of_device_id stm32_dma2d_match[];
597
/*
 * dma2d_probe() - platform probe
 *
 * Acquisition order: regs -> clock gate -> clk_prepare -> IRQ ->
 * v4l2_device -> video_device -> m2m device -> register video node.
 * The error labels unwind in exact reverse order; devm-managed
 * resources (regs, IRQ, dev allocation) need no explicit cleanup.
 *
 * Return: 0 on success or a negative errno.
 */
static int dma2d_probe(struct platform_device *pdev)
{
	struct dma2d_dev *dev;
	struct video_device *vfd;
	int ret = 0;

	dev = devm_kzalloc(&pdev->dev, sizeof(*dev), GFP_KERNEL);
	if (!dev)
		return -ENOMEM;

	spin_lock_init(&dev->ctrl_lock);
	mutex_init(&dev->mutex);
	atomic_set(&dev->num_inst, 0);

	dev->regs = devm_platform_get_and_ioremap_resource(pdev, 0, NULL);
	if (IS_ERR(dev->regs))
		return PTR_ERR(dev->regs);

	dev->gate = clk_get(&pdev->dev, "dma2d");
	if (IS_ERR(dev->gate)) {
		dev_err(&pdev->dev, "failed to get dma2d clock gate\n");
		ret = -ENXIO;
		return ret;
	}

	/* Only prepared here; enabled/disabled per transfer in device_run/isr. */
	ret = clk_prepare(dev->gate);
	if (ret) {
		dev_err(&pdev->dev, "failed to prepare dma2d clock gate\n");
		goto put_clk_gate;
	}

	ret = platform_get_irq(pdev, 0);
	if (ret < 0)
		goto unprep_clk_gate;

	dev->irq = ret;

	ret = devm_request_irq(&pdev->dev, dev->irq, dma2d_isr,
			       0, pdev->name, dev);
	if (ret) {
		dev_err(&pdev->dev, "failed to install IRQ\n");
		goto unprep_clk_gate;
	}

	ret = v4l2_device_register(&pdev->dev, &dev->v4l2_dev);
	if (ret)
		goto unprep_clk_gate;

	vfd = video_device_alloc();
	if (!vfd) {
		v4l2_err(&dev->v4l2_dev, "Failed to allocate video device\n");
		ret = -ENOMEM;
		goto unreg_v4l2_dev;
	}

	*vfd = dma2d_videodev;
	vfd->lock = &dev->mutex;
	vfd->v4l2_dev = &dev->v4l2_dev;
	vfd->device_caps = V4L2_CAP_VIDEO_M2M | V4L2_CAP_STREAMING;

	platform_set_drvdata(pdev, dev);
	dev->m2m_dev = v4l2_m2m_init(&dma2d_m2m_ops);
	if (IS_ERR(dev->m2m_dev)) {
		v4l2_err(&dev->v4l2_dev, "Failed to init mem2mem device\n");
		ret = PTR_ERR(dev->m2m_dev);
		goto rel_vdev;
	}

	ret = video_register_device(vfd, VFL_TYPE_VIDEO, 0);
	if (ret) {
		v4l2_err(&dev->v4l2_dev, "Failed to register video device\n");
		goto free_m2m;
	}

	video_set_drvdata(vfd, dev);
	dev->vfd = vfd;
	v4l2_info(&dev->v4l2_dev, "device registered as /dev/video%d\n",
		  vfd->num);
	return 0;

free_m2m:
	v4l2_m2m_release(dev->m2m_dev);
rel_vdev:
	video_device_release(vfd);
unreg_v4l2_dev:
	v4l2_device_unregister(&dev->v4l2_dev);
unprep_clk_gate:
	clk_unprepare(dev->gate);
put_clk_gate:
	clk_put(dev->gate);

	return ret;
}
691
/*
 * dma2d_remove() - platform remove: undo dma2d_probe() in reverse.
 * devm-managed resources (regs, IRQ, dev) are released automatically.
 */
static void dma2d_remove(struct platform_device *pdev)
{
	struct dma2d_dev *dev = platform_get_drvdata(pdev);

	v4l2_info(&dev->v4l2_dev, "Removing " DMA2D_NAME);
	v4l2_m2m_release(dev->m2m_dev);
	video_unregister_device(dev->vfd);
	v4l2_device_unregister(&dev->v4l2_dev);
	vb2_dma_contig_clear_max_seg_size(&pdev->dev);
	clk_unprepare(dev->gate);
	clk_put(dev->gate);
}
704
/* Device-tree match table (binding: st,stm32-dma2d). */
static const struct of_device_id stm32_dma2d_match[] = {
	{
		.compatible = "st,stm32-dma2d",
		.data = NULL,
	},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dma2d_match);
713
/* Platform driver glue and module metadata. */
static struct platform_driver dma2d_pdrv = {
	.probe		= dma2d_probe,
	.remove		= dma2d_remove,
	.driver		= {
		.name = DMA2D_NAME,
		.of_match_table = stm32_dma2d_match,
	},
};

module_platform_driver(dma2d_pdrv);

MODULE_AUTHOR("Dillon Min <dillon.minfei@gmail.com>");
MODULE_DESCRIPTION("STM32 Chrom-Art Accelerator DMA2D driver");
MODULE_LICENSE("GPL");
728