// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * STM32 DMA2D - 2D Graphics Accelerator Driver
 *
 * Copyright (c) 2021 Dillon Min
 * Dillon Min, <dillon.minfei@gmail.com>
 *
 * based on s5p-g2d
 *
 * Copyright (c) 2011 Samsung Electronics Co., Ltd.
 * Kamil Debski, <k.debski@samsung.com>
 */

#include <linux/module.h>
#include <linux/fs.h>
#include <linux/timer.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/clk.h>
#include <linux/interrupt.h>
#include <linux/of.h>

#include <linux/platform_device.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-event.h>
#include <media/videobuf2-v4l2.h>
#include <media/videobuf2-dma-contig.h>

#include "dma2d.h"
#include "dma2d-regs.h"

/*
 * This V4L2 mem2mem driver enables the Chrom-Art Accelerator (DMA2D) unit
 * of the STMicroelectronics STM32 SoC series.
 *
 * It currently supports the r2m, m2m and m2m_pfc modes:
 *
 * - r2m: fill a part or the whole of a destination image with a specific
 *   color.
 * - m2m: copy a part or the whole of a source image into a part or the
 *   whole of a destination image.
 * - m2m_pfc: copy a part or the whole of a source image into a part or the
 *   whole of a destination image with pixel format conversion.
 */

static inline struct dma2d_ctx *file2ctx(struct file *filp)
{
	return container_of(file_to_v4l2_fh(filp), struct dma2d_ctx, fh);
}

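/* Pixel formats supported by the DMA2D, mapping V4L2 fourccs to CMODE values */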
static const struct dma2d_fmt formats[] = {
	{
		.fourcc	= V4L2_PIX_FMT_ARGB32,
		.cmode = DMA2D_CMODE_ARGB8888,
		.depth = 32,
	},
	{
		.fourcc	= V4L2_PIX_FMT_RGB24,
		.cmode = DMA2D_CMODE_RGB888,
		.depth = 24,
	},
	{
		.fourcc	= V4L2_PIX_FMT_RGB565,
		.cmode = DMA2D_CMODE_RGB565,
		.depth = 16,
	},
	{
		.fourcc	= V4L2_PIX_FMT_ARGB555,
		.cmode = DMA2D_CMODE_ARGB1555,
		.depth = 16,
	},
	{
		.fourcc	= V4L2_PIX_FMT_ARGB444,
		.cmode = DMA2D_CMODE_ARGB4444,
		.depth = 16,
	},
};

#define NUM_FORMATS ARRAY_SIZE(formats)

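/* Default frame: DEFAULT_WIDTH x DEFAULT_HEIGHT in ARGB8888, alpha unmodified */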
static const struct dma2d_frame def_frame = {
	.width		= DEFAULT_WIDTH,
	.height		= DEFAULT_HEIGHT,
	.line_offset	= 0,
	.a_rgb		= {0x00, 0x00, 0x00, 0xff},
	.a_mode		= DMA2D_ALPHA_MODE_NO_MODIF,
	.fmt		= (struct dma2d_fmt *)&formats[0],
	.size		= DEFAULT_SIZE,
};

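/* Look up the dma2d_fmt entry for a V4L2 fourcc, or return NULL if unsupported */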
static struct dma2d_fmt *find_fmt(int pixelformat)
{
	unsigned int i;

	for (i = 0; i < NUM_FORMATS; i++) {
		if (formats[i].fourcc == pixelformat)
			return (struct dma2d_fmt *)&formats[i];
	}

	return NULL;
}

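/*
 * Map a buffer type to the per-context frame state: the OUTPUT queue
 * describes the source image, the CAPTURE queue the destination image.
 */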
static struct dma2d_frame *get_frame(struct dma2d_ctx *ctx,
				     enum v4l2_buf_type type)
{
	return V4L2_TYPE_IS_OUTPUT(type) ? &ctx->out : &ctx->cap;
}

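/* Single-plane queues: one plane per buffer, sized to hold the whole frame */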
static int dma2d_queue_setup(struct vb2_queue *vq,
			     unsigned int *nbuffers, unsigned int *nplanes,
			     unsigned int sizes[], struct device *alloc_devs[])
{
	struct dma2d_ctx *ctx = vb2_get_drv_priv(vq);
	struct dma2d_frame *f = get_frame(ctx, vq->type);

	if (*nplanes)
		return sizes[0] < f->size ? -EINVAL : 0;

	sizes[0] = f->size;
	*nplanes = 1;

	return 0;
}

static int dma2d_buf_out_validate(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);

	if (vbuf->field == V4L2_FIELD_ANY)
		vbuf->field = V4L2_FIELD_NONE;
	if (vbuf->field != V4L2_FIELD_NONE)
		return -EINVAL;

	return 0;
}

static int dma2d_buf_prepare(struct vb2_buffer *vb)
{
	struct dma2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct dma2d_frame *f = get_frame(ctx, vb->vb2_queue->type);

	if (vb2_plane_size(vb, 0) < f->size)
		return -EINVAL;

	vb2_set_plane_payload(vb, 0, f->size);

	return 0;
}

static void dma2d_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct dma2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
}

static int dma2d_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct dma2d_ctx *ctx = vb2_get_drv_priv(q);
	struct dma2d_frame *f = get_frame(ctx, q->type);

	f->sequence = 0;
	return 0;
}

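/* Remove any buffers still queued on this queue and mark them as errored */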
static void dma2d_stop_streaming(struct vb2_queue *q)
{
	struct dma2d_ctx *ctx = vb2_get_drv_priv(q);
	struct vb2_v4l2_buffer *vbuf;

	for (;;) {
		if (V4L2_TYPE_IS_OUTPUT(q->type))
			vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
		else
			vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
		if (!vbuf)
			return;
		v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
	}
}

static const struct vb2_ops dma2d_qops = {
	.queue_setup	  = dma2d_queue_setup,
	.buf_out_validate = dma2d_buf_out_validate,
	.buf_prepare	  = dma2d_buf_prepare,
	.buf_queue	  = dma2d_buf_queue,
	.start_streaming  = dma2d_start_streaming,
	.stop_streaming   = dma2d_stop_streaming,
};

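/*
 * Both the OUTPUT (source) and CAPTURE (destination) queues use physically
 * contiguous DMA memory and are serialized by the per-device mutex.
 */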
static int queue_init(void *priv, struct vb2_queue *src_vq,
		      struct vb2_queue *dst_vq)
{
	struct dma2d_ctx *ctx = priv;
	int ret;

	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	src_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	src_vq->drv_priv = ctx;
	src_vq->ops = &dma2d_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->lock = &ctx->dev->mutex;
	src_vq->dev = ctx->dev->v4l2_dev.dev;

	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	dst_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	dst_vq->drv_priv = ctx;
	dst_vq->ops = &dma2d_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->lock = &ctx->dev->mutex;
	dst_vq->dev = ctx->dev->v4l2_dev.dev;

	return vb2_queue_init(dst_vq);
}

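/*
 * V4L2_CID_COLORFX selects register-to-memory (solid fill) vs
 * memory-to-memory operation; V4L2_CID_COLORFX_RGB sets the fill color
 * stored in the destination (capture) frame.
 */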
static int dma2d_s_ctrl(struct v4l2_ctrl *ctrl)
{
	struct dma2d_frame *frm;
	struct dma2d_ctx *ctx = container_of(ctrl->handler, struct dma2d_ctx,
					     ctrl_handler);
	unsigned long flags;

	spin_lock_irqsave(&ctx->dev->ctrl_lock, flags);
	switch (ctrl->id) {
	case V4L2_CID_COLORFX:
		if (ctrl->val == V4L2_COLORFX_SET_RGB)
			ctx->op_mode = DMA2D_MODE_R2M;
		else if (ctrl->val == V4L2_COLORFX_NONE)
			ctx->op_mode = DMA2D_MODE_M2M;
		break;
	case V4L2_CID_COLORFX_RGB:
		frm = get_frame(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
		frm->a_rgb[2] = (ctrl->val >> 16) & 0xff;
		frm->a_rgb[1] = (ctrl->val >> 8) & 0xff;
		frm->a_rgb[0] = (ctrl->val >> 0) & 0xff;
		break;
	default:
		spin_unlock_irqrestore(&ctx->dev->ctrl_lock, flags);
		return -EINVAL;
	}
	spin_unlock_irqrestore(&ctx->dev->ctrl_lock, flags);

	return 0;
}

static const struct v4l2_ctrl_ops dma2d_ctrl_ops = {
	.s_ctrl	= dma2d_s_ctrl,
};

static int dma2d_setup_ctrls(struct dma2d_ctx *ctx)
{
	struct v4l2_ctrl_handler *handler = &ctx->ctrl_handler;

	v4l2_ctrl_handler_init(handler, 2);

	v4l2_ctrl_new_std_menu(handler, &dma2d_ctrl_ops, V4L2_CID_COLORFX,
			       V4L2_COLORFX_SET_RGB, ~0x10001,
			       V4L2_COLORFX_NONE);

	v4l2_ctrl_new_std(handler, &dma2d_ctrl_ops, V4L2_CID_COLORFX_RGB, 0,
			  0xffffff, 1, 0);

	return 0;
}

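/* Allocate a per-file context with default frames, an m2m context and controls */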
static int dma2d_open(struct file *file)
{
	struct dma2d_dev *dev = video_drvdata(file);
	struct dma2d_ctx *ctx = NULL;
	int ret = 0;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;
	ctx->dev = dev;
	/* Set default formats */
	ctx->cap	= def_frame;
	ctx->bg		= def_frame;
	ctx->out	= def_frame;
	ctx->op_mode	= DMA2D_MODE_M2M_FPC;
	ctx->colorspace = V4L2_COLORSPACE_REC709;
	if (mutex_lock_interruptible(&dev->mutex)) {
		kfree(ctx);
		return -ERESTARTSYS;
	}

	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx, &queue_init);
	if (IS_ERR(ctx->fh.m2m_ctx)) {
		ret = PTR_ERR(ctx->fh.m2m_ctx);
		mutex_unlock(&dev->mutex);
		kfree(ctx);
		return ret;
	}

	v4l2_fh_init(&ctx->fh, video_devdata(file));
	v4l2_fh_add(&ctx->fh, file);

	dma2d_setup_ctrls(ctx);

	/* Write the default values to the ctx struct */
	v4l2_ctrl_handler_setup(&ctx->ctrl_handler);

	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
	mutex_unlock(&dev->mutex);

	return 0;
}

static int dma2d_release(struct file *file)
{
	struct dma2d_dev *dev = video_drvdata(file);
	struct dma2d_ctx *ctx = file2ctx(file);

	mutex_lock(&dev->mutex);
	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
	mutex_unlock(&dev->mutex);
	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
	v4l2_fh_del(&ctx->fh, file);
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);

	return 0;
}

static int vidioc_querycap(struct file *file, void *priv,
			   struct v4l2_capability *cap)
{
	strscpy(cap->driver, DMA2D_NAME, sizeof(cap->driver));
	strscpy(cap->card, DMA2D_NAME, sizeof(cap->card));
	strscpy(cap->bus_info, BUS_INFO, sizeof(cap->bus_info));

	return 0;
}

static int vidioc_enum_fmt(struct file *file, void *priv, struct v4l2_fmtdesc *f)
{
	if (f->index >= NUM_FORMATS)
		return -EINVAL;

	f->pixelformat = formats[f->index].fourcc;
	return 0;
}

static int vidioc_g_fmt(struct file *file, void *priv, struct v4l2_format *f)
{
	struct dma2d_ctx *ctx = file2ctx(file);
	struct vb2_queue *vq;
	struct dma2d_frame *frm;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (!vq)
		return -EINVAL;

	frm = get_frame(ctx, f->type);
	f->fmt.pix.width		= frm->width;
	f->fmt.pix.height		= frm->height;
	f->fmt.pix.field		= V4L2_FIELD_NONE;
	f->fmt.pix.pixelformat		= frm->fmt->fourcc;
	f->fmt.pix.bytesperline		= (frm->width * frm->fmt->depth) >> 3;
	f->fmt.pix.sizeimage		= frm->size;
	f->fmt.pix.colorspace		= ctx->colorspace;
	f->fmt.pix.xfer_func		= ctx->xfer_func;
	f->fmt.pix.ycbcr_enc		= ctx->ycbcr_enc;
	f->fmt.pix.quantization		= ctx->quant;

	return 0;
}

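/* Clamp the requested format to the supported pixel formats and size limits */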
static int vidioc_try_fmt(struct file *file, void *priv, struct v4l2_format *f)
{
	struct dma2d_ctx *ctx = file2ctx(file);
	struct dma2d_fmt *fmt;
	enum v4l2_field *field;
	u32 fourcc = f->fmt.pix.pixelformat;

	fmt = find_fmt(fourcc);
	if (!fmt) {
		f->fmt.pix.pixelformat = formats[0].fourcc;
		fmt = find_fmt(f->fmt.pix.pixelformat);
	}

	field = &f->fmt.pix.field;
	if (*field == V4L2_FIELD_ANY)
		*field = V4L2_FIELD_NONE;
	else if (*field != V4L2_FIELD_NONE)
		return -EINVAL;

	if (f->fmt.pix.width > MAX_WIDTH)
		f->fmt.pix.width = MAX_WIDTH;
	if (f->fmt.pix.height > MAX_HEIGHT)
		f->fmt.pix.height = MAX_HEIGHT;

	if (f->fmt.pix.width < 1)
		f->fmt.pix.width = 1;
	if (f->fmt.pix.height < 1)
		f->fmt.pix.height = 1;

	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT && !f->fmt.pix.colorspace) {
		f->fmt.pix.colorspace = V4L2_COLORSPACE_REC709;
	} else if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		f->fmt.pix.colorspace = ctx->colorspace;
		f->fmt.pix.xfer_func = ctx->xfer_func;
		f->fmt.pix.ycbcr_enc = ctx->ycbcr_enc;
		f->fmt.pix.quantization = ctx->quant;
	}
	f->fmt.pix.bytesperline = (f->fmt.pix.width * fmt->depth) >> 3;
	f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;

	return 0;
}

static int vidioc_s_fmt(struct file *file, void *priv, struct v4l2_format *f)
{
	struct dma2d_ctx *ctx = file2ctx(file);
	struct vb2_queue *vq;
	struct dma2d_frame *frm;
	struct dma2d_fmt *fmt;
	int ret = 0;

	/*
	 * Adjust all values according to the hardware capabilities
	 * and the chosen format.
	 */
	ret = vidioc_try_fmt(file, priv, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (vb2_is_busy(vq))
		return -EBUSY;

	fmt = find_fmt(f->fmt.pix.pixelformat);
	if (!fmt)
		return -EINVAL;

	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
		ctx->colorspace = f->fmt.pix.colorspace;
		ctx->xfer_func = f->fmt.pix.xfer_func;
		ctx->ycbcr_enc = f->fmt.pix.ycbcr_enc;
		ctx->quant = f->fmt.pix.quantization;
	}

	frm = get_frame(ctx, f->type);
	frm->width = f->fmt.pix.width;
	frm->height = f->fmt.pix.height;
	frm->size = f->fmt.pix.sizeimage;
	/* Reset crop settings */
	frm->o_width = 0;
	frm->o_height = 0;
	frm->c_width = frm->width;
	frm->c_height = frm->height;
	frm->right = frm->width;
	frm->bottom = frm->height;
	frm->fmt = fmt;
	frm->line_offset = 0;

	return 0;
}

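/*
 * Run one mem2mem job: called by the m2m framework with a source and a
 * destination buffer queued. The foreground and output stages are programmed
 * and the transfer started under ctrl_lock, so control changes cannot race
 * a running job.
 */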
static void device_run(void *prv)
{
	struct dma2d_ctx *ctx = prv;
	struct dma2d_dev *dev = ctx->dev;
	struct dma2d_frame *frm_out, *frm_cap;
	struct vb2_v4l2_buffer *src, *dst;
	unsigned long flags;

	spin_lock_irqsave(&dev->ctrl_lock, flags);
	dev->curr = ctx;

	src = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
	dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
	if (!dst || !src)
		goto end;

	frm_cap = get_frame(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
	frm_out = get_frame(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
	if (!frm_cap || !frm_out)
		goto end;

	src->sequence = frm_out->sequence++;
	dst->sequence = frm_cap->sequence++;
	v4l2_m2m_buf_copy_metadata(src, dst, true);

	if (clk_enable(dev->gate))
		goto end;

	dma2d_config_fg(dev, frm_out,
			vb2_dma_contig_plane_dma_addr(&src->vb2_buf, 0));

	/* TODO: add M2M_BLEND handler here */

	if (ctx->op_mode != DMA2D_MODE_R2M) {
		if (frm_out->fmt->fourcc == frm_cap->fmt->fourcc)
			ctx->op_mode = DMA2D_MODE_M2M;
		else
			ctx->op_mode = DMA2D_MODE_M2M_FPC;
	}

	dma2d_config_out(dev, frm_cap,
			 vb2_dma_contig_plane_dma_addr(&dst->vb2_buf, 0));
	dma2d_config_common(dev, ctx->op_mode, frm_cap->width, frm_cap->height);

	dma2d_start(dev);
end:
	spin_unlock_irqrestore(&dev->ctrl_lock, flags);
}

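/* Transfer-complete interrupt: hand back the buffers and finish the m2m job */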
static irqreturn_t dma2d_isr(int irq, void *prv)
{
	struct dma2d_dev *dev = prv;
	struct dma2d_ctx *ctx = dev->curr;
	struct vb2_v4l2_buffer *src, *dst;
	u32 s = dma2d_get_int(dev);

	dma2d_clear_int(dev);
	if (s & ISR_TCIF || s == 0) {
		clk_disable(dev->gate);

		WARN_ON(!ctx);

		src = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
		dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);

		WARN_ON(!dst);
		WARN_ON(!src);

		v4l2_m2m_buf_done(src, VB2_BUF_STATE_DONE);
		v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE);
		v4l2_m2m_job_finish(dev->m2m_dev, ctx->fh.m2m_ctx);

		dev->curr = NULL;
	}

	return IRQ_HANDLED;
}

static const struct v4l2_file_operations dma2d_fops = {
	.owner		= THIS_MODULE,
	.open		= dma2d_open,
	.release	= dma2d_release,
	.poll		= v4l2_m2m_fop_poll,
	.unlocked_ioctl	= video_ioctl2,
	.mmap		= v4l2_m2m_fop_mmap,
#ifndef CONFIG_MMU
	.get_unmapped_area = v4l2_m2m_get_unmapped_area,
#endif
};

static const struct v4l2_ioctl_ops dma2d_ioctl_ops = {
	.vidioc_querycap	= vidioc_querycap,

	.vidioc_enum_fmt_vid_cap	= vidioc_enum_fmt,
	.vidioc_g_fmt_vid_cap		= vidioc_g_fmt,
	.vidioc_try_fmt_vid_cap		= vidioc_try_fmt,
	.vidioc_s_fmt_vid_cap		= vidioc_s_fmt,

	.vidioc_enum_fmt_vid_out	= vidioc_enum_fmt,
	.vidioc_g_fmt_vid_out		= vidioc_g_fmt,
	.vidioc_try_fmt_vid_out		= vidioc_try_fmt,
	.vidioc_s_fmt_vid_out		= vidioc_s_fmt,

	.vidioc_reqbufs			= v4l2_m2m_ioctl_reqbufs,
	.vidioc_querybuf		= v4l2_m2m_ioctl_querybuf,
	.vidioc_qbuf			= v4l2_m2m_ioctl_qbuf,
	.vidioc_dqbuf			= v4l2_m2m_ioctl_dqbuf,
	.vidioc_prepare_buf		= v4l2_m2m_ioctl_prepare_buf,
	.vidioc_create_bufs		= v4l2_m2m_ioctl_create_bufs,
	.vidioc_expbuf			= v4l2_m2m_ioctl_expbuf,

	.vidioc_streamon		= v4l2_m2m_ioctl_streamon,
	.vidioc_streamoff		= v4l2_m2m_ioctl_streamoff,

	.vidioc_subscribe_event		= v4l2_ctrl_subscribe_event,
	.vidioc_unsubscribe_event	= v4l2_event_unsubscribe,
};

static const struct video_device dma2d_videodev = {
	.name		= DMA2D_NAME,
	.fops		= &dma2d_fops,
	.ioctl_ops	= &dma2d_ioctl_ops,
	.minor		= -1,
	.release	= video_device_release,
	.vfl_dir	= VFL_DIR_M2M,
};

static const struct v4l2_m2m_ops dma2d_m2m_ops = {
	.device_run	= device_run,
};

static const struct of_device_id stm32_dma2d_match[];

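/*
 * Probe: map the DMA2D registers, get and prepare the "dma2d" gate clock,
 * request the interrupt and register the mem2mem video device.
 */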
static int dma2d_probe(struct platform_device *pdev)
{
	struct dma2d_dev *dev;
	struct video_device *vfd;
	int ret = 0;

	dev = devm_kzalloc(&pdev->dev, sizeof(*dev), GFP_KERNEL);
	if (!dev)
		return -ENOMEM;

	spin_lock_init(&dev->ctrl_lock);
	mutex_init(&dev->mutex);
	atomic_set(&dev->num_inst, 0);

	dev->regs = devm_platform_get_and_ioremap_resource(pdev, 0, NULL);
	if (IS_ERR(dev->regs))
		return PTR_ERR(dev->regs);

	dev->gate = clk_get(&pdev->dev, "dma2d");
	if (IS_ERR(dev->gate)) {
		dev_err(&pdev->dev, "failed to get dma2d clock gate\n");
		ret = -ENXIO;
		return ret;
	}

	ret = clk_prepare(dev->gate);
	if (ret) {
		dev_err(&pdev->dev, "failed to prepare dma2d clock gate\n");
		goto put_clk_gate;
	}

	ret = platform_get_irq(pdev, 0);
	if (ret < 0)
		goto unprep_clk_gate;

	dev->irq = ret;

	ret = devm_request_irq(&pdev->dev, dev->irq, dma2d_isr,
			       0, pdev->name, dev);
	if (ret) {
		dev_err(&pdev->dev, "failed to install IRQ\n");
		goto unprep_clk_gate;
	}

	ret = v4l2_device_register(&pdev->dev, &dev->v4l2_dev);
	if (ret)
		goto unprep_clk_gate;

	vfd = video_device_alloc();
	if (!vfd) {
		v4l2_err(&dev->v4l2_dev, "Failed to allocate video device\n");
		ret = -ENOMEM;
		goto unreg_v4l2_dev;
	}

	*vfd = dma2d_videodev;
	vfd->lock = &dev->mutex;
	vfd->v4l2_dev = &dev->v4l2_dev;
	vfd->device_caps = V4L2_CAP_VIDEO_M2M | V4L2_CAP_STREAMING;

	platform_set_drvdata(pdev, dev);
	dev->m2m_dev = v4l2_m2m_init(&dma2d_m2m_ops);
	if (IS_ERR(dev->m2m_dev)) {
		v4l2_err(&dev->v4l2_dev, "Failed to init mem2mem device\n");
		ret = PTR_ERR(dev->m2m_dev);
		goto rel_vdev;
	}

	ret = video_register_device(vfd, VFL_TYPE_VIDEO, 0);
	if (ret) {
		v4l2_err(&dev->v4l2_dev, "Failed to register video device\n");
		goto free_m2m;
	}

	video_set_drvdata(vfd, dev);
	dev->vfd = vfd;
	v4l2_info(&dev->v4l2_dev, "device registered as /dev/video%d\n",
		  vfd->num);
	return 0;

free_m2m:
	v4l2_m2m_release(dev->m2m_dev);
rel_vdev:
	video_device_release(vfd);
unreg_v4l2_dev:
	v4l2_device_unregister(&dev->v4l2_dev);
unprep_clk_gate:
	clk_unprepare(dev->gate);
put_clk_gate:
	clk_put(dev->gate);

	return ret;
}

static void dma2d_remove(struct platform_device *pdev)
{
	struct dma2d_dev *dev = platform_get_drvdata(pdev);

	v4l2_info(&dev->v4l2_dev, "Removing " DMA2D_NAME "\n");
	v4l2_m2m_release(dev->m2m_dev);
	video_unregister_device(dev->vfd);
	v4l2_device_unregister(&dev->v4l2_dev);
	vb2_dma_contig_clear_max_seg_size(&pdev->dev);
	clk_unprepare(dev->gate);
	clk_put(dev->gate);
}

static const struct of_device_id stm32_dma2d_match[] = {
	{
		.compatible = "st,stm32-dma2d",
		.data = NULL,
	},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dma2d_match);

static struct platform_driver dma2d_pdrv = {
	.probe		= dma2d_probe,
	.remove		= dma2d_remove,
	.driver		= {
		.name = DMA2D_NAME,
		.of_match_table = stm32_dma2d_match,
	},
};

module_platform_driver(dma2d_pdrv);

MODULE_AUTHOR("Dillon Min <dillon.minfei@gmail.com>");
MODULE_DESCRIPTION("STM32 Chrom-Art Accelerator DMA2D driver");
MODULE_LICENSE("GPL");