// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * STM32 DMA2D - 2D Graphics Accelerator Driver
 *
 * Copyright (c) 2021 Dillon Min
 * Dillon Min, <dillon.minfei@gmail.com>
 *
 * based on s5p-g2d
 *
 * Copyright (c) 2011 Samsung Electronics Co., Ltd.
 * Kamil Debski, <k.debski@samsung.com>
 */

#include <linux/module.h>
#include <linux/fs.h>
#include <linux/timer.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/clk.h>
#include <linux/interrupt.h>
#include <linux/of.h>

#include <linux/platform_device.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-event.h>
#include <media/videobuf2-v4l2.h>
#include <media/videobuf2-dma-contig.h>

#include "dma2d.h"
#include "dma2d-regs.h"

/*
 * This V4L2 mem2mem driver enables the Chrom-Art Accelerator unit
 * of the STMicroelectronics STM32 SoC series.
 *
 * It currently supports three modes: r2m, m2m and m2m_pfc.
 *
 * - r2m, filling a part or the whole of a destination image with a specific
 *   color.
 * - m2m, copying a part or the whole of a source image into a part or the
 *   whole of a destination image.
 * - m2m_pfc, copying a part or the whole of a source image into a part or the
 *   whole of a destination image with a pixel format conversion.
 */

#define fh2ctx(__fh) container_of(__fh, struct dma2d_ctx, fh)

static const struct dma2d_fmt formats[] = {
	{
		.fourcc = V4L2_PIX_FMT_ARGB32,
		.cmode = DMA2D_CMODE_ARGB8888,
		.depth = 32,
	},
	{
		.fourcc = V4L2_PIX_FMT_RGB24,
		.cmode = DMA2D_CMODE_RGB888,
		.depth = 24,
	},
	{
		.fourcc = V4L2_PIX_FMT_RGB565,
		.cmode = DMA2D_CMODE_RGB565,
		.depth = 16,
	},
	{
		.fourcc = V4L2_PIX_FMT_ARGB555,
		.cmode = DMA2D_CMODE_ARGB1555,
		.depth = 16,
	},
	{
		.fourcc = V4L2_PIX_FMT_ARGB444,
		.cmode = DMA2D_CMODE_ARGB4444,
		.depth = 16,
	},
};

#define NUM_FORMATS ARRAY_SIZE(formats)

static const struct dma2d_frame def_frame = {
	.width = DEFAULT_WIDTH,
	.height = DEFAULT_HEIGHT,
	.line_offset = 0,
	.a_rgb = {0x00, 0x00, 0x00, 0xff},
	.a_mode = DMA2D_ALPHA_MODE_NO_MODIF,
	.fmt = (struct dma2d_fmt *)&formats[0],
	.size = DEFAULT_SIZE,
};

static struct dma2d_fmt *find_fmt(int pixelformat)
{
	unsigned int i;

	for (i = 0; i < NUM_FORMATS; i++) {
		if (formats[i].fourcc == pixelformat)
			return (struct dma2d_fmt *)&formats[i];
	}

	return NULL;
}

/* OUTPUT buffers carry the source frame, CAPTURE buffers the destination. */
static struct dma2d_frame *get_frame(struct dma2d_ctx *ctx,
				     enum v4l2_buf_type type)
{
	return V4L2_TYPE_IS_OUTPUT(type) ? &ctx->out : &ctx->cap;
}

static int dma2d_queue_setup(struct vb2_queue *vq,
			     unsigned int *nbuffers, unsigned int *nplanes,
			     unsigned int sizes[], struct device *alloc_devs[])
{
	struct dma2d_ctx *ctx = vb2_get_drv_priv(vq);
	struct dma2d_frame *f = get_frame(ctx, vq->type);

	if (*nplanes)
		return sizes[0] < f->size ? -EINVAL : 0;
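
	/* A single plane, large enough to hold the whole frame. */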
	sizes[0] = f->size;
	*nplanes = 1;

	return 0;
}

static int dma2d_buf_out_validate(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);

	if (vbuf->field == V4L2_FIELD_ANY)
		vbuf->field = V4L2_FIELD_NONE;
	if (vbuf->field != V4L2_FIELD_NONE)
		return -EINVAL;

	return 0;
}

static int dma2d_buf_prepare(struct vb2_buffer *vb)
{
	struct dma2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct dma2d_frame *f = get_frame(ctx, vb->vb2_queue->type);

	if (vb2_plane_size(vb, 0) < f->size)
		return -EINVAL;

	vb2_set_plane_payload(vb, 0, f->size);

	return 0;
}

static void dma2d_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct dma2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
}

static int dma2d_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct dma2d_ctx *ctx = vb2_get_drv_priv(q);
	struct dma2d_frame *f = get_frame(ctx, q->type);

	f->sequence = 0;
	return 0;
}

static void dma2d_stop_streaming(struct vb2_queue *q)
{
	struct dma2d_ctx *ctx = vb2_get_drv_priv(q);
	struct vb2_v4l2_buffer *vbuf;

	for (;;) {
		if (V4L2_TYPE_IS_OUTPUT(q->type))
			vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
		else
			vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
		if (!vbuf)
			return;
		v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
	}
}

static const struct vb2_ops dma2d_qops = {
	.queue_setup = dma2d_queue_setup,
	.buf_out_validate = dma2d_buf_out_validate,
	.buf_prepare = dma2d_buf_prepare,
	.buf_queue = dma2d_buf_queue,
	.start_streaming = dma2d_start_streaming,
	.stop_streaming = dma2d_stop_streaming,
};

static int queue_init(void *priv, struct vb2_queue *src_vq,
		      struct vb2_queue *dst_vq)
{
	struct dma2d_ctx *ctx = priv;
	int ret;

	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	src_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	src_vq->drv_priv = ctx;
	src_vq->ops = &dma2d_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->lock = &ctx->dev->mutex;
	src_vq->dev = ctx->dev->v4l2_dev.dev;

	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	dst_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	dst_vq->drv_priv = ctx;
	dst_vq->ops = &dma2d_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->lock = &ctx->dev->mutex;
	dst_vq->dev = ctx->dev->v4l2_dev.dev;

	return vb2_queue_init(dst_vq);
}

static int dma2d_s_ctrl(struct v4l2_ctrl *ctrl)
{
	struct dma2d_frame *frm;
	struct dma2d_ctx *ctx = container_of(ctrl->handler, struct dma2d_ctx,
					     ctrl_handler);
	unsigned long flags;

	spin_lock_irqsave(&ctx->dev->ctrl_lock, flags);
	switch (ctrl->id) {
	case V4L2_CID_COLORFX:
		if (ctrl->val == V4L2_COLORFX_SET_RGB)
			ctx->op_mode = DMA2D_MODE_R2M;
		else if (ctrl->val == V4L2_COLORFX_NONE)
			ctx->op_mode = DMA2D_MODE_M2M;
		break;
	case V4L2_CID_COLORFX_RGB:
		frm = get_frame(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
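		/*
		 * V4L2_CID_COLORFX_RGB carries the R2M fill color as
		 * (R << 16) | (G << 8) | B; store it as the blue, green and
		 * red bytes of a_rgb[], leaving the default alpha untouched.
		 */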
		frm->a_rgb[2] = (ctrl->val >> 16) & 0xff;
		frm->a_rgb[1] = (ctrl->val >> 8) & 0xff;
		frm->a_rgb[0] = (ctrl->val >> 0) & 0xff;
		break;
	default:
		spin_unlock_irqrestore(&ctx->dev->ctrl_lock, flags);
		return -EINVAL;
	}
	spin_unlock_irqrestore(&ctx->dev->ctrl_lock, flags);

	return 0;
}

static const struct v4l2_ctrl_ops dma2d_ctrl_ops = {
	.s_ctrl = dma2d_s_ctrl,
};

static int dma2d_setup_ctrls(struct dma2d_ctx *ctx)
{
	struct v4l2_ctrl_handler *handler = &ctx->ctrl_handler;

	v4l2_ctrl_handler_init(handler, 2);

	v4l2_ctrl_new_std_menu(handler, &dma2d_ctrl_ops, V4L2_CID_COLORFX,
			       V4L2_COLORFX_SET_RGB, ~0x10001,
			       V4L2_COLORFX_NONE);

	v4l2_ctrl_new_std(handler, &dma2d_ctrl_ops, V4L2_CID_COLORFX_RGB, 0,
			  0xffffff, 1, 0);

	return 0;
}

static int dma2d_open(struct file *file)
{
	struct dma2d_dev *dev = video_drvdata(file);
	struct dma2d_ctx *ctx = NULL;
	int ret = 0;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;
	ctx->dev = dev;
	/* Set default formats */
	ctx->cap = def_frame;
	ctx->bg = def_frame;
	ctx->out = def_frame;
	ctx->op_mode = DMA2D_MODE_M2M_FPC;
	ctx->colorspace = V4L2_COLORSPACE_REC709;
	if (mutex_lock_interruptible(&dev->mutex)) {
		kfree(ctx);
		return -ERESTARTSYS;
	}

	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx, &queue_init);
	if (IS_ERR(ctx->fh.m2m_ctx)) {
		ret = PTR_ERR(ctx->fh.m2m_ctx);
		mutex_unlock(&dev->mutex);
		kfree(ctx);
		return ret;
	}

	v4l2_fh_init(&ctx->fh, video_devdata(file));
	file->private_data = &ctx->fh;
	v4l2_fh_add(&ctx->fh);

	dma2d_setup_ctrls(ctx);

	/* Write the default values to the ctx struct */
	v4l2_ctrl_handler_setup(&ctx->ctrl_handler);

	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
	mutex_unlock(&dev->mutex);

	return 0;
}

static int dma2d_release(struct file *file)
{
	struct dma2d_dev *dev = video_drvdata(file);
	struct dma2d_ctx *ctx = fh2ctx(file->private_data);

	mutex_lock(&dev->mutex);
	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
	mutex_unlock(&dev->mutex);
	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);

	return 0;
}

static int vidioc_querycap(struct file *file, void *priv,
			   struct v4l2_capability *cap)
{
	strscpy(cap->driver, DMA2D_NAME, sizeof(cap->driver));
	strscpy(cap->card, DMA2D_NAME, sizeof(cap->card));
	strscpy(cap->bus_info, BUS_INFO, sizeof(cap->bus_info));

	return 0;
}

static int vidioc_enum_fmt(struct file *file, void *prv, struct v4l2_fmtdesc *f)
{
	if (f->index >= NUM_FORMATS)
		return -EINVAL;

	f->pixelformat = formats[f->index].fourcc;
	return 0;
}

static int vidioc_g_fmt(struct file *file, void *prv, struct v4l2_format *f)
{
	struct dma2d_ctx *ctx = prv;
	struct vb2_queue *vq;
	struct dma2d_frame *frm;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (!vq)
		return -EINVAL;

	frm = get_frame(ctx, f->type);
	f->fmt.pix.width = frm->width;
	f->fmt.pix.height = frm->height;
	f->fmt.pix.field = V4L2_FIELD_NONE;
	f->fmt.pix.pixelformat = frm->fmt->fourcc;
	f->fmt.pix.bytesperline = (frm->width * frm->fmt->depth) >> 3;
	f->fmt.pix.sizeimage = frm->size;
	f->fmt.pix.colorspace = ctx->colorspace;
	f->fmt.pix.xfer_func = ctx->xfer_func;
	f->fmt.pix.ycbcr_enc = ctx->ycbcr_enc;
	f->fmt.pix.quantization = ctx->quant;

	return 0;
}
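
/*
 * Clamp the requested frame size to the engine limits and derive
 * bytesperline and sizeimage from the selected pixel format.
 */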
static int vidioc_try_fmt(struct file *file, void *prv, struct v4l2_format *f)
{
	struct dma2d_ctx *ctx = prv;
	struct dma2d_fmt *fmt;
	enum v4l2_field *field;
	u32 fourcc = f->fmt.pix.pixelformat;

	fmt = find_fmt(fourcc);
	if (!fmt) {
		f->fmt.pix.pixelformat = formats[0].fourcc;
		fmt = find_fmt(f->fmt.pix.pixelformat);
	}

	field = &f->fmt.pix.field;
	if (*field == V4L2_FIELD_ANY)
		*field = V4L2_FIELD_NONE;
	else if (*field != V4L2_FIELD_NONE)
		return -EINVAL;

	if (f->fmt.pix.width > MAX_WIDTH)
		f->fmt.pix.width = MAX_WIDTH;
	if (f->fmt.pix.height > MAX_HEIGHT)
		f->fmt.pix.height = MAX_HEIGHT;

	if (f->fmt.pix.width < 1)
		f->fmt.pix.width = 1;
	if (f->fmt.pix.height < 1)
		f->fmt.pix.height = 1;

	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT && !f->fmt.pix.colorspace) {
		f->fmt.pix.colorspace = V4L2_COLORSPACE_REC709;
	} else if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		f->fmt.pix.colorspace = ctx->colorspace;
		f->fmt.pix.xfer_func = ctx->xfer_func;
		f->fmt.pix.ycbcr_enc = ctx->ycbcr_enc;
		f->fmt.pix.quantization = ctx->quant;
	}
	f->fmt.pix.bytesperline = (f->fmt.pix.width * fmt->depth) >> 3;
	f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;

	return 0;
}

static int vidioc_s_fmt(struct file *file, void *prv, struct v4l2_format *f)
{
	struct dma2d_ctx *ctx = prv;
	struct vb2_queue *vq;
	struct dma2d_frame *frm;
	struct dma2d_fmt *fmt;
	int ret = 0;

	/* Adjust all values according to the hardware capabilities
	 * and chosen format.
	 */
	ret = vidioc_try_fmt(file, prv, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (vb2_is_busy(vq))
		return -EBUSY;

	fmt = find_fmt(f->fmt.pix.pixelformat);
	if (!fmt)
		return -EINVAL;

	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
		ctx->colorspace = f->fmt.pix.colorspace;
		ctx->xfer_func = f->fmt.pix.xfer_func;
		ctx->ycbcr_enc = f->fmt.pix.ycbcr_enc;
		ctx->quant = f->fmt.pix.quantization;
	}

	frm = get_frame(ctx, f->type);
	frm->width = f->fmt.pix.width;
	frm->height = f->fmt.pix.height;
	frm->size = f->fmt.pix.sizeimage;
	/* Reset crop settings */
	frm->o_width = 0;
	frm->o_height = 0;
	frm->c_width = frm->width;
	frm->c_height = frm->height;
	frm->right = frm->width;
	frm->bottom = frm->height;
	frm->fmt = fmt;
	frm->line_offset = 0;

	return 0;
}

static void device_run(void *prv)
{
	struct dma2d_ctx *ctx = prv;
	struct dma2d_dev *dev = ctx->dev;
	struct dma2d_frame *frm_out, *frm_cap;
	struct vb2_v4l2_buffer *src, *dst;
	unsigned long flags;

	spin_lock_irqsave(&dev->ctrl_lock, flags);
	dev->curr = ctx;

	src = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
	dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
	if (!dst || !src)
		goto end;

	frm_cap = get_frame(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
	frm_out = get_frame(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
	if (!frm_cap || !frm_out)
		goto end;

	src->sequence = frm_out->sequence++;
	dst->sequence = frm_cap->sequence++;
	v4l2_m2m_buf_copy_metadata(src, dst, true);

	clk_enable(dev->gate);

	dma2d_config_fg(dev, frm_out,
			vb2_dma_contig_plane_dma_addr(&src->vb2_buf, 0));

	/* TODO: add M2M_BLEND handler here */
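
	/*
	 * For memory-to-memory jobs use a plain copy when the source and
	 * destination pixel formats match, otherwise go through the pixel
	 * format conversion path.
	 */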
	if (ctx->op_mode != DMA2D_MODE_R2M) {
		if (frm_out->fmt->fourcc == frm_cap->fmt->fourcc)
			ctx->op_mode = DMA2D_MODE_M2M;
		else
			ctx->op_mode = DMA2D_MODE_M2M_FPC;
	}

	dma2d_config_out(dev, frm_cap,
			 vb2_dma_contig_plane_dma_addr(&dst->vb2_buf, 0));
	dma2d_config_common(dev, ctx->op_mode, frm_cap->width, frm_cap->height);

	dma2d_start(dev);
end:
	spin_unlock_irqrestore(&dev->ctrl_lock, flags);
}

static irqreturn_t dma2d_isr(int irq, void *prv)
{
	struct dma2d_dev *dev = prv;
	struct dma2d_ctx *ctx = dev->curr;
	struct vb2_v4l2_buffer *src, *dst;
	u32 s = dma2d_get_int(dev);

	dma2d_clear_int(dev);
	if (s & ISR_TCIF || s == 0) {
		clk_disable(dev->gate);

		WARN_ON(!ctx);

		src = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
		dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);

		WARN_ON(!dst);
		WARN_ON(!src);

		v4l2_m2m_buf_done(src, VB2_BUF_STATE_DONE);
		v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE);
		v4l2_m2m_job_finish(dev->m2m_dev, ctx->fh.m2m_ctx);

		dev->curr = NULL;
	}

	return IRQ_HANDLED;
}

static const struct v4l2_file_operations dma2d_fops = {
	.owner = THIS_MODULE,
	.open = dma2d_open,
	.release = dma2d_release,
	.poll = v4l2_m2m_fop_poll,
	.unlocked_ioctl = video_ioctl2,
	.mmap = v4l2_m2m_fop_mmap,
#ifndef CONFIG_MMU
	.get_unmapped_area = v4l2_m2m_get_unmapped_area,
#endif
};

static const struct v4l2_ioctl_ops dma2d_ioctl_ops = {
	.vidioc_querycap = vidioc_querycap,

	.vidioc_enum_fmt_vid_cap = vidioc_enum_fmt,
	.vidioc_g_fmt_vid_cap = vidioc_g_fmt,
	.vidioc_try_fmt_vid_cap = vidioc_try_fmt,
	.vidioc_s_fmt_vid_cap = vidioc_s_fmt,

	.vidioc_enum_fmt_vid_out = vidioc_enum_fmt,
	.vidioc_g_fmt_vid_out = vidioc_g_fmt,
	.vidioc_try_fmt_vid_out = vidioc_try_fmt,
	.vidioc_s_fmt_vid_out = vidioc_s_fmt,

	.vidioc_reqbufs = v4l2_m2m_ioctl_reqbufs,
	.vidioc_querybuf = v4l2_m2m_ioctl_querybuf,
	.vidioc_qbuf = v4l2_m2m_ioctl_qbuf,
	.vidioc_dqbuf = v4l2_m2m_ioctl_dqbuf,
	.vidioc_prepare_buf = v4l2_m2m_ioctl_prepare_buf,
	.vidioc_create_bufs = v4l2_m2m_ioctl_create_bufs,
	.vidioc_expbuf = v4l2_m2m_ioctl_expbuf,

	.vidioc_streamon = v4l2_m2m_ioctl_streamon,
	.vidioc_streamoff = v4l2_m2m_ioctl_streamoff,

	.vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
	.vidioc_unsubscribe_event = v4l2_event_unsubscribe,
};

static const struct video_device dma2d_videodev = {
	.name = DMA2D_NAME,
	.fops = &dma2d_fops,
	.ioctl_ops = &dma2d_ioctl_ops,
	.minor = -1,
	.release = video_device_release,
	.vfl_dir = VFL_DIR_M2M,
};

static const struct v4l2_m2m_ops dma2d_m2m_ops = {
	.device_run = device_run,
};

static const struct of_device_id stm32_dma2d_match[];

static int dma2d_probe(struct platform_device *pdev)
{
	struct dma2d_dev *dev;
	struct video_device *vfd;
	int ret = 0;

	dev = devm_kzalloc(&pdev->dev, sizeof(*dev), GFP_KERNEL);
	if (!dev)
		return -ENOMEM;

	spin_lock_init(&dev->ctrl_lock);
	mutex_init(&dev->mutex);
	atomic_set(&dev->num_inst, 0);

	dev->regs = devm_platform_get_and_ioremap_resource(pdev, 0, NULL);
	if (IS_ERR(dev->regs))
		return PTR_ERR(dev->regs);

	dev->gate = clk_get(&pdev->dev, "dma2d");
	if (IS_ERR(dev->gate)) {
		dev_err(&pdev->dev, "failed to get dma2d clock gate\n");
		ret = -ENXIO;
		return ret;
	}
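
	/*
	 * The gate clock is only prepared here; it is enabled around each
	 * job in device_run() and disabled again from the interrupt handler.
	 */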
	ret = clk_prepare(dev->gate);
	if (ret) {
		dev_err(&pdev->dev, "failed to prepare dma2d clock gate\n");
		goto put_clk_gate;
	}

	ret = platform_get_irq(pdev, 0);
	if (ret < 0)
		goto unprep_clk_gate;

	dev->irq = ret;

	ret = devm_request_irq(&pdev->dev, dev->irq, dma2d_isr,
			       0, pdev->name, dev);
	if (ret) {
		dev_err(&pdev->dev, "failed to install IRQ\n");
		goto unprep_clk_gate;
	}

	ret = v4l2_device_register(&pdev->dev, &dev->v4l2_dev);
	if (ret)
		goto unprep_clk_gate;

	vfd = video_device_alloc();
	if (!vfd) {
		v4l2_err(&dev->v4l2_dev, "Failed to allocate video device\n");
		ret = -ENOMEM;
		goto unreg_v4l2_dev;
	}

	*vfd = dma2d_videodev;
	vfd->lock = &dev->mutex;
	vfd->v4l2_dev = &dev->v4l2_dev;
	vfd->device_caps = V4L2_CAP_VIDEO_M2M | V4L2_CAP_STREAMING;

	platform_set_drvdata(pdev, dev);
	dev->m2m_dev = v4l2_m2m_init(&dma2d_m2m_ops);
	if (IS_ERR(dev->m2m_dev)) {
		v4l2_err(&dev->v4l2_dev, "Failed to init mem2mem device\n");
		ret = PTR_ERR(dev->m2m_dev);
		goto rel_vdev;
	}

	ret = video_register_device(vfd, VFL_TYPE_VIDEO, 0);
	if (ret) {
		v4l2_err(&dev->v4l2_dev, "Failed to register video device\n");
		goto free_m2m;
	}

	video_set_drvdata(vfd, dev);
	dev->vfd = vfd;
	v4l2_info(&dev->v4l2_dev, "device registered as /dev/video%d\n",
		  vfd->num);
	return 0;

free_m2m:
	v4l2_m2m_release(dev->m2m_dev);
rel_vdev:
	video_device_release(vfd);
unreg_v4l2_dev:
	v4l2_device_unregister(&dev->v4l2_dev);
unprep_clk_gate:
	clk_unprepare(dev->gate);
put_clk_gate:
	clk_put(dev->gate);

	return ret;
}

static void dma2d_remove(struct platform_device *pdev)
{
	struct dma2d_dev *dev = platform_get_drvdata(pdev);

	v4l2_info(&dev->v4l2_dev, "Removing " DMA2D_NAME);
	v4l2_m2m_release(dev->m2m_dev);
	video_unregister_device(dev->vfd);
	v4l2_device_unregister(&dev->v4l2_dev);
	vb2_dma_contig_clear_max_seg_size(&pdev->dev);
	clk_unprepare(dev->gate);
	clk_put(dev->gate);
}

static const struct of_device_id stm32_dma2d_match[] = {
	{
		.compatible = "st,stm32-dma2d",
		.data = NULL,
	},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dma2d_match);

static struct platform_driver dma2d_pdrv = {
	.probe = dma2d_probe,
	.remove = dma2d_remove,
	.driver = {
		.name = DMA2D_NAME,
		.of_match_table = stm32_dma2d_match,
	},
};

module_platform_driver(dma2d_pdrv);

MODULE_AUTHOR("Dillon Min <dillon.minfei@gmail.com>");
MODULE_DESCRIPTION("STM32 Chrom-Art Accelerator DMA2D driver");
MODULE_LICENSE("GPL");