// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) Fuzhou Rockchip Electronics Co.Ltd
 * Author: Jacob Chen <jacob-chen@iotwrt.com>
 */

#include <linux/clk.h>
#include <linux/debugfs.h>
#include <linux/delay.h>
#include <linux/fs.h>
#include <linux/interrupt.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/timer.h>

#include <linux/platform_device.h>
#include <media/v4l2-device.h>
#include <media/v4l2-event.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-mem2mem.h>
#include <media/videobuf2-dma-sg.h>
#include <media/videobuf2-v4l2.h>

#include "rga-hw.h"
#include "rga.h"

static int debug;
module_param(debug, int, 0644);

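/*
 * m2m device_run callback: invoked by the v4l2-mem2mem framework once both a
 * source and a destination buffer are queued. It grabs the next buffer pair,
 * stamps the source sequence number and programs the hardware; completion is
 * signalled from the interrupt handler below.
 */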
static void device_run(void *prv)
{
        struct rga_ctx *ctx = prv;
        struct rockchip_rga *rga = ctx->rga;
        struct vb2_v4l2_buffer *src, *dst;
        unsigned long flags;

        spin_lock_irqsave(&rga->ctrl_lock, flags);

        rga->curr = ctx;

        src = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
        src->sequence = ctx->osequence++;

        dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);

        rga_hw_start(rga, vb_to_rga(src), vb_to_rga(dst));

        spin_unlock_irqrestore(&rga->ctrl_lock, flags);
}

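/*
 * Interrupt handler: read the pending status bits from RGA_INT and write them
 * back shifted into bits 7:4 (which acts as the acknowledge field on this
 * hardware). On the completion bit (0x04) the current buffer pair is removed
 * from the queues, both buffers are marked done and the m2m job is finished.
 */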
static irqreturn_t rga_isr(int irq, void *prv)
{
        struct rockchip_rga *rga = prv;
        int intr;

        intr = rga_read(rga, RGA_INT) & 0xf;

        rga_mod(rga, RGA_INT, intr << 4, 0xf << 4);

        if (intr & 0x04) {
                struct vb2_v4l2_buffer *src, *dst;
                struct rga_ctx *ctx = rga->curr;

                WARN_ON(!ctx);

                rga->curr = NULL;

                src = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
                dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);

                WARN_ON(!src);
                WARN_ON(!dst);

                v4l2_m2m_buf_copy_metadata(src, dst, true);

                dst->sequence = ctx->csequence++;

                v4l2_m2m_buf_done(src, VB2_BUF_STATE_DONE);
                v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE);
                v4l2_m2m_job_finish(rga->m2m_dev, ctx->fh.m2m_ctx);
        }

        return IRQ_HANDLED;
}

static const struct v4l2_m2m_ops rga_m2m_ops = {
        .device_run = device_run,
};

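/*
 * Queue setup for the m2m context: the OUTPUT (source) and CAPTURE
 * (destination) vb2 queues share the same ops, scatter-gather memops and
 * per-device mutex; __GFP_DMA32 keeps MMAP allocations below 4 GiB, matching
 * the 32-bit DMA mask set in probe.
 */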
static int
queue_init(void *priv, struct vb2_queue *src_vq, struct vb2_queue *dst_vq)
{
        struct rga_ctx *ctx = priv;
        int ret;

        src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
        src_vq->io_modes = VB2_MMAP | VB2_DMABUF;
        src_vq->drv_priv = ctx;
        src_vq->ops = &rga_qops;
        src_vq->mem_ops = &vb2_dma_sg_memops;
        src_vq->gfp_flags = __GFP_DMA32;
        src_vq->buf_struct_size = sizeof(struct rga_vb_buffer);
        src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
        src_vq->lock = &ctx->rga->mutex;
        src_vq->dev = ctx->rga->v4l2_dev.dev;

        ret = vb2_queue_init(src_vq);
        if (ret)
                return ret;

        dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        dst_vq->io_modes = VB2_MMAP | VB2_DMABUF;
        dst_vq->drv_priv = ctx;
        dst_vq->ops = &rga_qops;
        dst_vq->mem_ops = &vb2_dma_sg_memops;
        dst_vq->gfp_flags = __GFP_DMA32;
        dst_vq->buf_struct_size = sizeof(struct rga_vb_buffer);
        dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
        dst_vq->lock = &ctx->rga->mutex;
        dst_vq->dev = ctx->rga->v4l2_dev.dev;

        return vb2_queue_init(dst_vq);
}

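/*
 * s_ctrl callback: cache the new control value in the context under
 * ctrl_lock so that device_run() sees a consistent set of parameters when it
 * programs the hardware.
 */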
static int rga_s_ctrl(struct v4l2_ctrl *ctrl)
{
        struct rga_ctx *ctx = container_of(ctrl->handler, struct rga_ctx,
                                           ctrl_handler);
        unsigned long flags;

        spin_lock_irqsave(&ctx->rga->ctrl_lock, flags);
        switch (ctrl->id) {
        case V4L2_CID_HFLIP:
                ctx->hflip = ctrl->val;
                break;
        case V4L2_CID_VFLIP:
                ctx->vflip = ctrl->val;
                break;
        case V4L2_CID_ROTATE:
                ctx->rotate = ctrl->val;
                break;
        case V4L2_CID_BG_COLOR:
                ctx->fill_color = ctrl->val;
                break;
        }
        spin_unlock_irqrestore(&ctx->rga->ctrl_lock, flags);
        return 0;
}

static const struct v4l2_ctrl_ops rga_ctrl_ops = {
        .s_ctrl = rga_s_ctrl,
};

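/*
 * Register the per-context V4L2 controls: horizontal/vertical flip, rotation
 * in 90-degree steps (0-270) and the background fill color.
 */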
static int rga_setup_ctrls(struct rga_ctx *ctx)
{
        struct rockchip_rga *rga = ctx->rga;

        v4l2_ctrl_handler_init(&ctx->ctrl_handler, 4);

        v4l2_ctrl_new_std(&ctx->ctrl_handler, &rga_ctrl_ops,
                          V4L2_CID_HFLIP, 0, 1, 1, 0);

        v4l2_ctrl_new_std(&ctx->ctrl_handler, &rga_ctrl_ops,
                          V4L2_CID_VFLIP, 0, 1, 1, 0);

        v4l2_ctrl_new_std(&ctx->ctrl_handler, &rga_ctrl_ops,
                          V4L2_CID_ROTATE, 0, 270, 90, 0);

        v4l2_ctrl_new_std(&ctx->ctrl_handler, &rga_ctrl_ops,
                          V4L2_CID_BG_COLOR, 0, 0xffffffff, 1, 0);

        if (ctx->ctrl_handler.error) {
                int err = ctx->ctrl_handler.error;

                v4l2_err(&rga->v4l2_dev, "%s failed\n", __func__);
                v4l2_ctrl_handler_free(&ctx->ctrl_handler);
                return err;
        }

        return 0;
}

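/*
 * Table of pixel formats supported by the driver, mapping each V4L2 fourcc
 * to the hardware format code, the channel swap needed on this IP, and the
 * per-format depth and chroma subsampling divisors.
 */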
static struct rga_fmt formats[] = {
        {
                .fourcc = V4L2_PIX_FMT_ARGB32,
                .color_swap = RGA_COLOR_ALPHA_SWAP,
                .hw_format = RGA_COLOR_FMT_ABGR8888,
                .depth = 32,
                .uv_factor = 1,
                .y_div = 1,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_ABGR32,
                .color_swap = RGA_COLOR_RB_SWAP,
                .hw_format = RGA_COLOR_FMT_ABGR8888,
                .depth = 32,
                .uv_factor = 1,
                .y_div = 1,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_XBGR32,
                .color_swap = RGA_COLOR_RB_SWAP,
                .hw_format = RGA_COLOR_FMT_XBGR8888,
                .depth = 32,
                .uv_factor = 1,
                .y_div = 1,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_RGB24,
                .color_swap = RGA_COLOR_NONE_SWAP,
                .hw_format = RGA_COLOR_FMT_RGB888,
                .depth = 24,
                .uv_factor = 1,
                .y_div = 1,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_BGR24,
                .color_swap = RGA_COLOR_RB_SWAP,
                .hw_format = RGA_COLOR_FMT_RGB888,
                .depth = 24,
                .uv_factor = 1,
                .y_div = 1,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_ARGB444,
                .color_swap = RGA_COLOR_RB_SWAP,
                .hw_format = RGA_COLOR_FMT_ABGR4444,
                .depth = 16,
                .uv_factor = 1,
                .y_div = 1,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_ARGB555,
                .color_swap = RGA_COLOR_RB_SWAP,
                .hw_format = RGA_COLOR_FMT_ABGR1555,
                .depth = 16,
                .uv_factor = 1,
                .y_div = 1,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_RGB565,
                .color_swap = RGA_COLOR_RB_SWAP,
                .hw_format = RGA_COLOR_FMT_BGR565,
                .depth = 16,
                .uv_factor = 1,
                .y_div = 1,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_NV21,
                .color_swap = RGA_COLOR_UV_SWAP,
                .hw_format = RGA_COLOR_FMT_YUV420SP,
                .depth = 12,
                .uv_factor = 4,
                .y_div = 2,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_NV61,
                .color_swap = RGA_COLOR_UV_SWAP,
                .hw_format = RGA_COLOR_FMT_YUV422SP,
                .depth = 16,
                .uv_factor = 2,
                .y_div = 1,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_NV12,
                .color_swap = RGA_COLOR_NONE_SWAP,
                .hw_format = RGA_COLOR_FMT_YUV420SP,
                .depth = 12,
                .uv_factor = 4,
                .y_div = 2,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_NV12M,
                .color_swap = RGA_COLOR_NONE_SWAP,
                .hw_format = RGA_COLOR_FMT_YUV420SP,
                .depth = 12,
                .uv_factor = 4,
                .y_div = 2,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_NV16,
                .color_swap = RGA_COLOR_NONE_SWAP,
                .hw_format = RGA_COLOR_FMT_YUV422SP,
                .depth = 16,
                .uv_factor = 2,
                .y_div = 1,
                .x_div = 1,
        },
        {
                .fourcc = V4L2_PIX_FMT_YUV420,
                .color_swap = RGA_COLOR_NONE_SWAP,
                .hw_format = RGA_COLOR_FMT_YUV420P,
                .depth = 12,
                .uv_factor = 4,
                .y_div = 2,
                .x_div = 2,
        },
        {
                .fourcc = V4L2_PIX_FMT_YUV422P,
                .color_swap = RGA_COLOR_NONE_SWAP,
                .hw_format = RGA_COLOR_FMT_YUV422P,
                .depth = 16,
                .uv_factor = 2,
                .y_div = 1,
                .x_div = 2,
        },
        {
                .fourcc = V4L2_PIX_FMT_YVU420,
                .color_swap = RGA_COLOR_UV_SWAP,
                .hw_format = RGA_COLOR_FMT_YUV420P,
                .depth = 12,
                .uv_factor = 4,
                .y_div = 2,
                .x_div = 2,
        },
};

#define NUM_FORMATS ARRAY_SIZE(formats)

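/* Look up a format descriptor by its V4L2 fourcc; returns NULL if unsupported. */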
static struct rga_fmt *rga_fmt_find(u32 pixelformat)
{
        unsigned int i;

        for (i = 0; i < NUM_FORMATS; i++) {
                if (formats[i].fourcc == pixelformat)
                        return &formats[i];
        }
        return NULL;
}

static struct rga_frame def_frame = {
        .width = DEFAULT_WIDTH,
        .height = DEFAULT_HEIGHT,
        .colorspace = V4L2_COLORSPACE_DEFAULT,
        .crop.left = 0,
        .crop.top = 0,
        .crop.width = DEFAULT_WIDTH,
        .crop.height = DEFAULT_HEIGHT,
        .fmt = &formats[0],
};

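/*
 * OUTPUT buffers describe the source frame (ctx->in), CAPTURE buffers the
 * destination frame (ctx->out).
 */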
struct rga_frame *rga_get_frame(struct rga_ctx *ctx, enum v4l2_buf_type type)
{
        if (V4L2_TYPE_IS_OUTPUT(type))
                return &ctx->in;
        if (V4L2_TYPE_IS_CAPTURE(type))
                return &ctx->out;
        return ERR_PTR(-EINVAL);
}

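/*
 * Open a new context: both frames start out as def_frame, the m2m context is
 * created under the device mutex, and the control handler is set up and
 * applied so the context starts with the default control values.
 */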
static int rga_open(struct file *file)
{
        struct rockchip_rga *rga = video_drvdata(file);
        struct rga_ctx *ctx = NULL;
        int ret = 0;

        ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
        if (!ctx)
                return -ENOMEM;
        ctx->rga = rga;
        /* Set default formats */
        ctx->in = def_frame;
        ctx->out = def_frame;

        v4l2_fill_pixfmt_mp(&ctx->in.pix,
                            ctx->in.fmt->fourcc, ctx->in.width, ctx->in.height);
        v4l2_fill_pixfmt_mp(&ctx->out.pix,
                            ctx->out.fmt->fourcc, ctx->out.width, ctx->out.height);

        if (mutex_lock_interruptible(&rga->mutex)) {
                kfree(ctx);
                return -ERESTARTSYS;
        }
        ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(rga->m2m_dev, ctx, &queue_init);
        if (IS_ERR(ctx->fh.m2m_ctx)) {
                ret = PTR_ERR(ctx->fh.m2m_ctx);
                mutex_unlock(&rga->mutex);
                kfree(ctx);
                return ret;
        }
        v4l2_fh_init(&ctx->fh, video_devdata(file));
        file->private_data = &ctx->fh;
        v4l2_fh_add(&ctx->fh);

        rga_setup_ctrls(ctx);

        /* Write the default values to the ctx struct */
        v4l2_ctrl_handler_setup(&ctx->ctrl_handler);

        ctx->fh.ctrl_handler = &ctx->ctrl_handler;
        mutex_unlock(&rga->mutex);

        return 0;
}

static int rga_release(struct file *file)
{
        struct rga_ctx *ctx =
                container_of(file->private_data, struct rga_ctx, fh);
        struct rockchip_rga *rga = ctx->rga;

        mutex_lock(&rga->mutex);

        v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);

        v4l2_ctrl_handler_free(&ctx->ctrl_handler);
        v4l2_fh_del(&ctx->fh);
        v4l2_fh_exit(&ctx->fh);
        kfree(ctx);

        mutex_unlock(&rga->mutex);

        return 0;
}

static const struct v4l2_file_operations rga_fops = {
        .owner = THIS_MODULE,
        .open = rga_open,
        .release = rga_release,
        .poll = v4l2_m2m_fop_poll,
        .unlocked_ioctl = video_ioctl2,
        .mmap = v4l2_m2m_fop_mmap,
};

static int
vidioc_querycap(struct file *file, void *priv, struct v4l2_capability *cap)
{
        strscpy(cap->driver, RGA_NAME, sizeof(cap->driver));
        strscpy(cap->card, "rockchip-rga", sizeof(cap->card));
        strscpy(cap->bus_info, "platform:rga", sizeof(cap->bus_info));

        return 0;
}

static int vidioc_enum_fmt(struct file *file, void *prv, struct v4l2_fmtdesc *f)
{
        struct rga_fmt *fmt;

        if (f->index >= NUM_FORMATS)
                return -EINVAL;

        fmt = &formats[f->index];
        f->pixelformat = fmt->fourcc;

        return 0;
}

static int vidioc_g_fmt(struct file *file, void *prv, struct v4l2_format *f)
{
        struct v4l2_pix_format_mplane *pix_fmt = &f->fmt.pix_mp;
        struct rga_ctx *ctx = prv;
        struct vb2_queue *vq;
        struct rga_frame *frm;

        vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
        if (!vq)
                return -EINVAL;
        frm = rga_get_frame(ctx, f->type);
        if (IS_ERR(frm))
                return PTR_ERR(frm);

        v4l2_fill_pixfmt_mp(pix_fmt, frm->fmt->fourcc, frm->width, frm->height);

        pix_fmt->field = V4L2_FIELD_NONE;
        pix_fmt->colorspace = frm->colorspace;

        return 0;
}

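/*
 * TRY_FMT: fall back to the first table entry for unknown pixel formats and
 * clamp the resolution to the MIN_/MAX_ limits before letting
 * v4l2_fill_pixfmt_mp() compute plane sizes and strides.
 */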
static int vidioc_try_fmt(struct file *file, void *prv, struct v4l2_format *f)
{
        struct v4l2_pix_format_mplane *pix_fmt = &f->fmt.pix_mp;
        struct rga_fmt *fmt;

        fmt = rga_fmt_find(pix_fmt->pixelformat);
        if (!fmt)
                fmt = &formats[0];

        pix_fmt->width = clamp(pix_fmt->width,
                               (u32)MIN_WIDTH, (u32)MAX_WIDTH);
        pix_fmt->height = clamp(pix_fmt->height,
                                (u32)MIN_HEIGHT, (u32)MAX_HEIGHT);

        v4l2_fill_pixfmt_mp(pix_fmt, fmt->fourcc, pix_fmt->width, pix_fmt->height);
        pix_fmt->field = V4L2_FIELD_NONE;

        return 0;
}

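/*
 * S_FMT: run the TRY_FMT adjustments, refuse to change the format while the
 * queue has buffers allocated, then store the negotiated format in the
 * per-context frame and reset the crop rectangle to the full frame.
 */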
static int vidioc_s_fmt(struct file *file, void *prv, struct v4l2_format *f)
{
        struct v4l2_pix_format_mplane *pix_fmt = &f->fmt.pix_mp;
        struct rga_ctx *ctx = prv;
        struct rockchip_rga *rga = ctx->rga;
        struct vb2_queue *vq;
        struct rga_frame *frm;
        int ret = 0;
        int i;

        /* Adjust all values according to the hardware capabilities
         * and the chosen format.
         */
        ret = vidioc_try_fmt(file, prv, f);
        if (ret)
                return ret;
        vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
        if (vb2_is_busy(vq)) {
                v4l2_err(&rga->v4l2_dev, "queue (%d) busy\n", f->type);
                return -EBUSY;
        }
        frm = rga_get_frame(ctx, f->type);
        if (IS_ERR(frm))
                return PTR_ERR(frm);
        frm->width = pix_fmt->width;
        frm->height = pix_fmt->height;
        frm->size = 0;
        for (i = 0; i < pix_fmt->num_planes; i++)
                frm->size += pix_fmt->plane_fmt[i].sizeimage;
        frm->fmt = rga_fmt_find(pix_fmt->pixelformat);
        frm->stride = pix_fmt->plane_fmt[0].bytesperline;
        frm->colorspace = pix_fmt->colorspace;

        /* Reset crop settings */
        frm->crop.left = 0;
        frm->crop.top = 0;
        frm->crop.width = frm->width;
        frm->crop.height = frm->height;

        frm->pix = *pix_fmt;

        v4l2_dbg(debug, 1, &rga->v4l2_dev,
                 "[%s] fmt - %p4cc %dx%d (stride %d, sizeimage %d)\n",
                 V4L2_TYPE_IS_OUTPUT(f->type) ? "OUTPUT" : "CAPTURE",
                 &frm->fmt->fourcc, frm->width, frm->height,
                 frm->stride, frm->size);

        for (i = 0; i < pix_fmt->num_planes; i++) {
                v4l2_dbg(debug, 1, &rga->v4l2_dev,
                         "plane[%d]: size %d, bytesperline %d\n",
                         i, pix_fmt->plane_fmt[i].sizeimage,
                         pix_fmt->plane_fmt[i].bytesperline);
        }

        return 0;
}

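/*
 * Selection API: the CROP targets apply to the OUTPUT (source) queue and the
 * COMPOSE targets to the CAPTURE (destination) queue; the DEFAULT/BOUNDS
 * rectangles always cover the full frame.
 */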
static int vidioc_g_selection(struct file *file, void *prv,
                              struct v4l2_selection *s)
{
        struct rga_ctx *ctx = prv;
        struct rga_frame *f;
        bool use_frame = false;

        f = rga_get_frame(ctx, s->type);
        if (IS_ERR(f))
                return PTR_ERR(f);

        switch (s->target) {
        case V4L2_SEL_TGT_COMPOSE_DEFAULT:
        case V4L2_SEL_TGT_COMPOSE_BOUNDS:
                if (!V4L2_TYPE_IS_CAPTURE(s->type))
                        return -EINVAL;
                break;
        case V4L2_SEL_TGT_CROP_DEFAULT:
        case V4L2_SEL_TGT_CROP_BOUNDS:
                if (!V4L2_TYPE_IS_OUTPUT(s->type))
                        return -EINVAL;
                break;
        case V4L2_SEL_TGT_COMPOSE:
                if (!V4L2_TYPE_IS_CAPTURE(s->type))
                        return -EINVAL;
                use_frame = true;
                break;
        case V4L2_SEL_TGT_CROP:
                if (!V4L2_TYPE_IS_OUTPUT(s->type))
                        return -EINVAL;
                use_frame = true;
                break;
        default:
                return -EINVAL;
        }

        if (use_frame) {
                s->r = f->crop;
        } else {
                s->r.left = 0;
                s->r.top = 0;
                s->r.width = f->width;
                s->r.height = f->height;
        }

        return 0;
}

static int vidioc_s_selection(struct file *file, void *prv,
                              struct v4l2_selection *s)
{
        struct rga_ctx *ctx = prv;
        struct rockchip_rga *rga = ctx->rga;
        struct rga_frame *f;
        int ret = 0;

        f = rga_get_frame(ctx, s->type);
        if (IS_ERR(f))
                return PTR_ERR(f);

        switch (s->target) {
        case V4L2_SEL_TGT_COMPOSE:
                /*
                 * COMPOSE target is only valid for capture buffer type, return
                 * error for output buffer type
                 */
                if (!V4L2_TYPE_IS_CAPTURE(s->type))
                        return -EINVAL;
                break;
        case V4L2_SEL_TGT_CROP:
                /*
                 * CROP target is only valid for output buffer type, return
                 * error for capture buffer type
                 */
                if (!V4L2_TYPE_IS_OUTPUT(s->type))
                        return -EINVAL;
                break;
        /*
         * bounds and default crop/compose targets are invalid targets to
         * try/set
         */
        default:
                return -EINVAL;
        }

        if (s->r.top < 0 || s->r.left < 0) {
                v4l2_dbg(debug, 1, &rga->v4l2_dev,
                         "doesn't support negative values for top & left.\n");
                return -EINVAL;
        }

        if (s->r.left + s->r.width > f->width ||
            s->r.top + s->r.height > f->height ||
            s->r.width < MIN_WIDTH || s->r.height < MIN_HEIGHT) {
                v4l2_dbg(debug, 1, &rga->v4l2_dev, "unsupported crop value.\n");
                return -EINVAL;
        }

        f->crop = s->r;

        return ret;
}

static const struct v4l2_ioctl_ops rga_ioctl_ops = {
        .vidioc_querycap = vidioc_querycap,

        .vidioc_enum_fmt_vid_cap = vidioc_enum_fmt,
        .vidioc_g_fmt_vid_cap_mplane = vidioc_g_fmt,
        .vidioc_try_fmt_vid_cap_mplane = vidioc_try_fmt,
        .vidioc_s_fmt_vid_cap_mplane = vidioc_s_fmt,

        .vidioc_enum_fmt_vid_out = vidioc_enum_fmt,
        .vidioc_g_fmt_vid_out_mplane = vidioc_g_fmt,
        .vidioc_try_fmt_vid_out_mplane = vidioc_try_fmt,
        .vidioc_s_fmt_vid_out_mplane = vidioc_s_fmt,

        .vidioc_reqbufs = v4l2_m2m_ioctl_reqbufs,
        .vidioc_querybuf = v4l2_m2m_ioctl_querybuf,
        .vidioc_qbuf = v4l2_m2m_ioctl_qbuf,
        .vidioc_dqbuf = v4l2_m2m_ioctl_dqbuf,
        .vidioc_prepare_buf = v4l2_m2m_ioctl_prepare_buf,
        .vidioc_create_bufs = v4l2_m2m_ioctl_create_bufs,
        .vidioc_expbuf = v4l2_m2m_ioctl_expbuf,

        .vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
        .vidioc_unsubscribe_event = v4l2_event_unsubscribe,

        .vidioc_streamon = v4l2_m2m_ioctl_streamon,
        .vidioc_streamoff = v4l2_m2m_ioctl_streamoff,

        .vidioc_g_selection = vidioc_g_selection,
        .vidioc_s_selection = vidioc_s_selection,
};

static const struct video_device rga_videodev = {
        .name = "rockchip-rga",
        .fops = &rga_fops,
        .ioctl_ops = &rga_ioctl_ops,
        .minor = -1,
        .release = video_device_release,
        .vfl_dir = VFL_DIR_M2M,
        .device_caps = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING,
};

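/*
 * Clock handling: sclk, aclk and hclk are enabled together for runtime
 * resume and disabled again on runtime suspend, with error unwinding if one
 * of them fails to enable.
 */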
static int rga_enable_clocks(struct rockchip_rga *rga)
{
        int ret;

        ret = clk_prepare_enable(rga->sclk);
        if (ret) {
                dev_err(rga->dev, "Cannot enable rga sclk: %d\n", ret);
                return ret;
        }

        ret = clk_prepare_enable(rga->aclk);
        if (ret) {
                dev_err(rga->dev, "Cannot enable rga aclk: %d\n", ret);
                goto err_disable_sclk;
        }

        ret = clk_prepare_enable(rga->hclk);
        if (ret) {
                dev_err(rga->dev, "Cannot enable rga hclk: %d\n", ret);
                goto err_disable_aclk;
        }

        return 0;

err_disable_aclk:
        clk_disable_unprepare(rga->aclk);
err_disable_sclk:
        clk_disable_unprepare(rga->sclk);

        return ret;
}

static void rga_disable_clocks(struct rockchip_rga *rga)
{
        clk_disable_unprepare(rga->sclk);
        clk_disable_unprepare(rga->hclk);
        clk_disable_unprepare(rga->aclk);
}

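/*
 * Acquire the reset controls and clocks described in the device tree and
 * pulse the core/axi/ahb resets once to bring the block into a known state.
 */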
static int rga_parse_dt(struct rockchip_rga *rga)
{
        struct reset_control *core_rst, *axi_rst, *ahb_rst;

        core_rst = devm_reset_control_get(rga->dev, "core");
        if (IS_ERR(core_rst)) {
                dev_err(rga->dev, "failed to get core reset controller\n");
                return PTR_ERR(core_rst);
        }

        axi_rst = devm_reset_control_get(rga->dev, "axi");
        if (IS_ERR(axi_rst)) {
                dev_err(rga->dev, "failed to get axi reset controller\n");
                return PTR_ERR(axi_rst);
        }

        ahb_rst = devm_reset_control_get(rga->dev, "ahb");
        if (IS_ERR(ahb_rst)) {
                dev_err(rga->dev, "failed to get ahb reset controller\n");
                return PTR_ERR(ahb_rst);
        }

        reset_control_assert(core_rst);
        udelay(1);
        reset_control_deassert(core_rst);

        reset_control_assert(axi_rst);
        udelay(1);
        reset_control_deassert(axi_rst);

        reset_control_assert(ahb_rst);
        udelay(1);
        reset_control_deassert(ahb_rst);

        rga->sclk = devm_clk_get(rga->dev, "sclk");
        if (IS_ERR(rga->sclk)) {
                dev_err(rga->dev, "failed to get sclk clock\n");
                return PTR_ERR(rga->sclk);
        }

        rga->aclk = devm_clk_get(rga->dev, "aclk");
        if (IS_ERR(rga->aclk)) {
                dev_err(rga->dev, "failed to get aclk clock\n");
                return PTR_ERR(rga->aclk);
        }

        rga->hclk = devm_clk_get(rga->dev, "hclk");
        if (IS_ERR(rga->hclk)) {
                dev_err(rga->dev, "failed to get hclk clock\n");
                return PTR_ERR(rga->hclk);
        }

        return 0;
}

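/*
 * Probe: map registers, request the interrupt, set a 32-bit DMA mask,
 * register the V4L2 and mem2mem devices, read the hardware version while the
 * device is runtime-resumed, allocate the command buffer and finally
 * register the video node.
 */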
static int rga_probe(struct platform_device *pdev)
{
        struct rockchip_rga *rga;
        struct video_device *vfd;
        int ret = 0;
        int irq;

        if (!pdev->dev.of_node)
                return -ENODEV;

        rga = devm_kzalloc(&pdev->dev, sizeof(*rga), GFP_KERNEL);
        if (!rga)
                return -ENOMEM;

        rga->dev = &pdev->dev;
        spin_lock_init(&rga->ctrl_lock);
        mutex_init(&rga->mutex);

        ret = rga_parse_dt(rga);
        if (ret)
                return dev_err_probe(&pdev->dev, ret, "Unable to parse OF data\n");

        pm_runtime_enable(rga->dev);

        rga->regs = devm_platform_ioremap_resource(pdev, 0);
        if (IS_ERR(rga->regs)) {
                ret = PTR_ERR(rga->regs);
                goto err_put_clk;
        }

        irq = platform_get_irq(pdev, 0);
        if (irq < 0) {
                ret = irq;
                goto err_put_clk;
        }

        ret = devm_request_irq(rga->dev, irq, rga_isr, 0,
                               dev_name(rga->dev), rga);
        if (ret < 0) {
                dev_err(rga->dev, "failed to request irq\n");
                goto err_put_clk;
        }

        ret = dma_set_mask_and_coherent(rga->dev, DMA_BIT_MASK(32));
        if (ret) {
                dev_err(rga->dev, "32-bit DMA not supported");
                goto err_put_clk;
        }

        ret = v4l2_device_register(&pdev->dev, &rga->v4l2_dev);
        if (ret)
                goto err_put_clk;
        vfd = video_device_alloc();
        if (!vfd) {
                v4l2_err(&rga->v4l2_dev, "Failed to allocate video device\n");
                ret = -ENOMEM;
                goto unreg_v4l2_dev;
        }
        *vfd = rga_videodev;
        vfd->lock = &rga->mutex;
        vfd->v4l2_dev = &rga->v4l2_dev;

        video_set_drvdata(vfd, rga);
        rga->vfd = vfd;

        platform_set_drvdata(pdev, rga);
        rga->m2m_dev = v4l2_m2m_init(&rga_m2m_ops);
        if (IS_ERR(rga->m2m_dev)) {
                v4l2_err(&rga->v4l2_dev, "Failed to init mem2mem device\n");
                ret = PTR_ERR(rga->m2m_dev);
                goto rel_vdev;
        }

        ret = pm_runtime_resume_and_get(rga->dev);
        if (ret < 0)
                goto rel_m2m;

        rga->version.major = (rga_read(rga, RGA_VERSION_INFO) >> 24) & 0xFF;
        rga->version.minor = (rga_read(rga, RGA_VERSION_INFO) >> 20) & 0x0F;

        v4l2_info(&rga->v4l2_dev, "HW Version: 0x%02x.%02x\n",
                  rga->version.major, rga->version.minor);

        pm_runtime_put(rga->dev);

        /* Create CMD buffer */
        rga->cmdbuf_virt = dma_alloc_attrs(rga->dev, RGA_CMDBUF_SIZE,
                                           &rga->cmdbuf_phy, GFP_KERNEL,
                                           DMA_ATTR_WRITE_COMBINE);
        if (!rga->cmdbuf_virt) {
                ret = -ENOMEM;
                goto rel_m2m;
        }

        def_frame.stride = (def_frame.width * def_frame.fmt->depth) >> 3;
        def_frame.size = def_frame.stride * def_frame.height;

        ret = video_register_device(vfd, VFL_TYPE_VIDEO, -1);
        if (ret) {
                v4l2_err(&rga->v4l2_dev, "Failed to register video device\n");
                goto free_dma;
        }

        v4l2_info(&rga->v4l2_dev, "Registered %s as /dev/%s\n",
                  vfd->name, video_device_node_name(vfd));

        return 0;

free_dma:
        dma_free_attrs(rga->dev, RGA_CMDBUF_SIZE, rga->cmdbuf_virt,
                       rga->cmdbuf_phy, DMA_ATTR_WRITE_COMBINE);
rel_m2m:
        v4l2_m2m_release(rga->m2m_dev);
rel_vdev:
        video_device_release(vfd);
unreg_v4l2_dev:
        v4l2_device_unregister(&rga->v4l2_dev);
err_put_clk:
        pm_runtime_disable(rga->dev);

        return ret;
}

static void rga_remove(struct platform_device *pdev)
{
        struct rockchip_rga *rga = platform_get_drvdata(pdev);

        dma_free_attrs(rga->dev, RGA_CMDBUF_SIZE, rga->cmdbuf_virt,
                       rga->cmdbuf_phy, DMA_ATTR_WRITE_COMBINE);

        v4l2_info(&rga->v4l2_dev, "Removing\n");

        v4l2_m2m_release(rga->m2m_dev);
        video_unregister_device(rga->vfd);
        v4l2_device_unregister(&rga->v4l2_dev);

        pm_runtime_disable(rga->dev);
}

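/*
 * Runtime PM only gates the three clocks; the driver does not save or
 * restore any register state here.
 */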
static int __maybe_unused rga_runtime_suspend(struct device *dev)
{
        struct rockchip_rga *rga = dev_get_drvdata(dev);

        rga_disable_clocks(rga);

        return 0;
}

static int __maybe_unused rga_runtime_resume(struct device *dev)
{
        struct rockchip_rga *rga = dev_get_drvdata(dev);

        return rga_enable_clocks(rga);
}

static const struct dev_pm_ops rga_pm = {
        SET_RUNTIME_PM_OPS(rga_runtime_suspend,
                           rga_runtime_resume, NULL)
};

static const struct of_device_id rockchip_rga_match[] = {
        {
                .compatible = "rockchip,rk3288-rga",
        },
        {
                .compatible = "rockchip,rk3399-rga",
        },
        {},
};

MODULE_DEVICE_TABLE(of, rockchip_rga_match);

static struct platform_driver rga_pdrv = {
        .probe = rga_probe,
        .remove_new = rga_remove,
        .driver = {
                .name = RGA_NAME,
                .pm = &rga_pm,
                .of_match_table = rockchip_rga_match,
        },
};

module_platform_driver(rga_pdrv);

MODULE_AUTHOR("Jacob Chen <jacob-chen@iotwrt.com>");
MODULE_DESCRIPTION("Rockchip Raster 2d Graphic Acceleration Unit");
MODULE_LICENSE("GPL");