// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright 2020-2021 NXP
 */

#include <linux/init.h>
#include <linux/interconnect.h>
#include <linux/ioctl.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/pm_runtime.h>
#include <linux/videodev2.h>
#include <media/v4l2-device.h>
#include <media/v4l2-event.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-ioctl.h>
#include <media/videobuf2-v4l2.h>
#include <media/videobuf2-dma-contig.h>
#include <media/videobuf2-vmalloc.h>
#include "vpu.h"
#include "vpu_core.h"
#include "vpu_v4l2.h"
#include "vpu_msgs.h"
#include "vpu_helpers.h"

/* Human-readable queue direction name, used in trace/debug messages. */
static char *vpu_type_name(u32 type)
{
	return V4L2_TYPE_IS_OUTPUT(type) ? "output" : "capture";
}

void vpu_inst_lock(struct vpu_inst *inst)
{
	mutex_lock(&inst->lock);
}

void vpu_inst_unlock(struct vpu_inst *inst)
{
	mutex_unlock(&inst->lock);
}

/*
 * DMA address of plane @plane_no of @vb, advanced past the plane's
 * data_offset. Returns 0 for an out-of-range plane index.
 */
dma_addr_t vpu_get_vb_phy_addr(struct vb2_buffer *vb, u32 plane_no)
{
	if (plane_no >= vb->num_planes)
		return 0;
	return vb2_dma_contig_plane_dma_addr(vb, plane_no) +
			vb->planes[plane_no].data_offset;
}

/*
 * Usable payload size of plane @plane_no (plane size minus data_offset).
 * Returns 0 for an out-of-range plane index.
 */
static unsigned int vpu_get_vb_length(struct vb2_buffer *vb, u32 plane_no)
{
	if (plane_no >= vb->num_planes)
		return 0;
	return vb2_plane_size(vb, plane_no) - vb->planes[plane_no].data_offset;
}

/* Store the driver-private VPU_BUF_STATE_* of a buffer. */
void vpu_set_buffer_state(struct vb2_v4l2_buffer *vbuf, unsigned int state)
{
	struct vpu_vb2_buffer *vpu_buf = to_vpu_vb2_buffer(vbuf);

	vpu_buf->state = state;
}

/* Read back the driver-private VPU_BUF_STATE_* of a buffer. */
unsigned int vpu_get_buffer_state(struct vb2_v4l2_buffer *vbuf)
{
	struct vpu_vb2_buffer *vpu_buf = to_vpu_vb2_buffer(vbuf);

	return vpu_buf->state;
}

/* Record the average QP reported for this buffer (consumed in buf_finish). */
void vpu_set_buffer_average_qp(struct vb2_v4l2_buffer *vbuf, u32 qp)
{
	struct vpu_vb2_buffer *vpu_buf = to_vpu_vb2_buffer(vbuf);

	vpu_buf->average_qp = qp;
}

void vpu_v4l2_set_error(struct vpu_inst
*inst)
{
	/* Put both vb2 queues into the error state so userspace is notified. */
	vpu_inst_lock(inst);
	dev_err(inst->dev, "some error occurs in codec\n");
	if (inst->fh.m2m_ctx) {
		vb2_queue_error(v4l2_m2m_get_src_vq(inst->fh.m2m_ctx));
		vb2_queue_error(v4l2_m2m_get_dst_vq(inst->fh.m2m_ctx));
	}
	vpu_inst_unlock(inst);
}

/* Queue a V4L2_EVENT_EOS event on the instance's file handle. */
static int vpu_notify_eos(struct vpu_inst *inst)
{
	static const struct v4l2_event ev = {
		.id = 0,
		.type = V4L2_EVENT_EOS
	};

	vpu_trace(inst->dev, "[%d]\n", inst->id);
	v4l2_event_queue_fh(&inst->fh, &ev);

	return 0;
}

/* Queue a resolution source-change event on the instance's file handle. */
int vpu_notify_source_change(struct vpu_inst *inst)
{
	static const struct v4l2_event ev = {
		.id = 0,
		.type = V4L2_EVENT_SOURCE_CHANGE,
		.u.src_change.changes = V4L2_EVENT_SRC_CH_RESOLUTION
	};

	vpu_trace(inst->dev, "[%d]\n", inst->id);
	v4l2_event_queue_fh(&inst->fh, &ev);
	return 0;
}

/*
 * Mark the capture queue's last buffer as dequeued and wake waiters,
 * optionally signalling EOS. Refused (-EINVAL) while completed buffers
 * are still sitting in the queue's done_list.
 */
int vpu_set_last_buffer_dequeued(struct vpu_inst *inst, bool eos)
{
	struct vb2_queue *q;

	if (!inst || !inst->fh.m2m_ctx)
		return -EINVAL;

	q = v4l2_m2m_get_dst_vq(inst->fh.m2m_ctx);
	if (!list_empty(&q->done_list))
		return -EINVAL;

	if (q->last_buffer_dequeued)
		return 0;
	vpu_trace(inst->dev, "last buffer dequeued\n");
	q->last_buffer_dequeued = true;
	wake_up(&q->done_wq);
	if (eos)
		vpu_notify_eos(inst);
	return 0;
}

/* True when no queued source buffer is still idle (i.e. unprocessed). */
bool vpu_is_source_empty(struct vpu_inst *inst)
{
	struct v4l2_m2m_buffer *buf = NULL;

	if (!inst->fh.m2m_ctx)
		return true;
	v4l2_m2m_for_each_src_buf(inst->fh.m2m_ctx, buf) {
		if (vpu_get_buffer_state(&buf->vb) == VPU_BUF_STATE_IDLE)
			return false;
	}
	return true;
}

/*
 * Fill @fmt from the format table entry matching fmt->type/pixfmt,
 * falling back to the first enumerable format when there is no match.
 */
static int vpu_init_format(struct vpu_inst *inst, struct vpu_format *fmt)
{
	const struct vpu_format *info;

	info = vpu_helper_find_format(inst, fmt->type, fmt->pixfmt);
	if (!info) {
		info = vpu_helper_enum_format(inst, fmt->type, 0);
		if (!info)
			return -EINVAL;
	}
	memcpy(fmt, info,
sizeof(*fmt));

	return 0;
}

/*
 * Derive per-component-plane bytesperline from the user-supplied pix_mp.
 * Compressed formats carry no line stride (bytesperline = 0).
 */
static int vpu_calc_fmt_bytesperline(struct v4l2_format *f, struct vpu_format *fmt)
{
	struct v4l2_pix_format_mplane *pixmp = &f->fmt.pix_mp;
	int i;

	if (fmt->flags & V4L2_FMT_FLAG_COMPRESSED) {
		for (i = 0; i < fmt->comp_planes; i++)
			fmt->bytesperline[i] = 0;
		return 0;
	}
	if (pixmp->num_planes == fmt->comp_planes) {
		for (i = 0; i < fmt->comp_planes; i++)
			fmt->bytesperline[i] = pixmp->plane_fmt[i].bytesperline;
		return 0;
	}
	if (pixmp->num_planes > 1)
		return -EINVAL;

	/*
	 * The amphion VPU only supports NV12 and NV12 tiled, so the
	 * bytesperline of luma and chroma should be the same.
	 */
	for (i = 0; i < fmt->comp_planes; i++)
		fmt->bytesperline[i] = pixmp->plane_fmt[0].bytesperline;

	return 0;
}

/*
 * Compute per-plane sizeimage (and fix up bytesperline) for @fmt.
 * Raw formats use the core-specific stride alignment; compressed plane
 * sizes are clamped to [SZ_128K, SZ_8M] and get bytesperline = 0.
 */
static int vpu_calc_fmt_sizeimage(struct vpu_inst *inst, struct vpu_format *fmt)
{
	u32 stride = 1;
	int i;

	if (!(fmt->flags & V4L2_FMT_FLAG_COMPRESSED)) {
		const struct vpu_core_resources *res = vpu_get_resource(inst);

		if (res)
			stride = res->stride;
	}

	for (i = 0; i < fmt->comp_planes; i++) {
		fmt->sizeimage[i] = vpu_helper_get_plane_size(fmt->pixfmt,
							      fmt->width,
							      fmt->height,
							      i,
							      stride,
							      fmt->field != V4L2_FIELD_NONE ?
1 : 0,
							      &fmt->bytesperline[i]);
		fmt->sizeimage[i] = max_t(u32, fmt->sizeimage[i], PAGE_SIZE);
		if (fmt->flags & V4L2_FMT_FLAG_COMPRESSED) {
			fmt->sizeimage[i] = clamp_val(fmt->sizeimage[i], SZ_128K, SZ_8M);
			fmt->bytesperline[i] = 0;
		}
	}

	return 0;
}

/*
 * Size of memory plane @plane_no. When there are fewer memory planes than
 * component planes (e.g. a contiguous multi-component layout), the last
 * memory plane accumulates the sizes of all remaining component planes.
 */
u32 vpu_get_fmt_plane_size(struct vpu_format *fmt, u32 plane_no)
{
	u32 size;
	int i;

	if (plane_no >= fmt->mem_planes)
		return 0;

	if (fmt->comp_planes == fmt->mem_planes)
		return fmt->sizeimage[plane_no];
	if (plane_no < fmt->mem_planes - 1)
		return fmt->sizeimage[plane_no];

	size = fmt->sizeimage[plane_no];
	for (i = fmt->mem_planes; i < fmt->comp_planes; i++)
		size += fmt->sizeimage[i];

	return size;
}

/*
 * Common TRY_FMT/S_FMT helper: negotiate @f against the instance's
 * capabilities, fill @fmt with the result, and write the adjusted values
 * back into the user-visible pix_mp structure.
 */
int vpu_try_fmt_common(struct vpu_inst *inst, struct v4l2_format *f, struct vpu_format *fmt)
{
	struct v4l2_pix_format_mplane *pixmp = &f->fmt.pix_mp;
	int i;
	int ret;

	fmt->pixfmt = pixmp->pixelformat;
	fmt->type = f->type;
	ret = vpu_init_format(inst, fmt);
	if (ret < 0)
		return ret;

	fmt->width = pixmp->width;
	fmt->height = pixmp->height;
	if (fmt->width)
		fmt->width = vpu_helper_valid_frame_width(inst, fmt->width);
	if (fmt->height)
		fmt->height = vpu_helper_valid_frame_height(inst, fmt->height);
	fmt->field = pixmp->field == V4L2_FIELD_ANY ?
V4L2_FIELD_NONE : pixmp->field;
	vpu_calc_fmt_bytesperline(f, fmt);
	vpu_calc_fmt_sizeimage(inst, fmt);
	/* Honour a user-supplied compressed buffer size, within sane bounds. */
	if ((fmt->flags & V4L2_FMT_FLAG_COMPRESSED) && pixmp->plane_fmt[0].sizeimage)
		fmt->sizeimage[0] = clamp_val(pixmp->plane_fmt[0].sizeimage, SZ_128K, SZ_8M);

	pixmp->pixelformat = fmt->pixfmt;
	pixmp->width = fmt->width;
	pixmp->height = fmt->height;
	pixmp->flags = fmt->flags;
	pixmp->num_planes = fmt->mem_planes;
	pixmp->field = fmt->field;
	memset(pixmp->reserved, 0, sizeof(pixmp->reserved));
	for (i = 0; i < pixmp->num_planes; i++) {
		pixmp->plane_fmt[i].bytesperline = fmt->bytesperline[i];
		pixmp->plane_fmt[i].sizeimage = vpu_get_fmt_plane_size(fmt, i);
		memset(pixmp->plane_fmt[i].reserved, 0, sizeof(pixmp->plane_fmt[i].reserved));
	}

	return 0;
}

/* Whether the instance can accept work on queue @type right now. */
static bool vpu_check_ready(struct vpu_inst *inst, u32 type)
{
	if (!inst)
		return false;
	if (inst->state == VPU_CODEC_STATE_DEINIT || inst->id < 0)
		return false;
	if (!inst->ops->check_ready)
		return true;
	return call_vop(inst, check_ready, type);
}

/*
 * Hand the first idle (unprocessed) source buffer to the codec-specific
 * process_output op. Returns -EINVAL when nothing is ready.
 */
int vpu_process_output_buffer(struct vpu_inst *inst)
{
	struct v4l2_m2m_buffer *buf = NULL;
	struct vb2_v4l2_buffer *vbuf = NULL;

	if (!inst || !inst->fh.m2m_ctx)
		return -EINVAL;

	if (!vpu_check_ready(inst, inst->out_format.type))
		return -EINVAL;

	v4l2_m2m_for_each_src_buf(inst->fh.m2m_ctx, buf) {
		vbuf = &buf->vb;
		if (vpu_get_buffer_state(vbuf) == VPU_BUF_STATE_IDLE)
			break;
		vbuf = NULL;
	}

	if (!vbuf)
		return -EINVAL;

	dev_dbg(inst->dev, "[%d]frame id = %d / %d\n",
		inst->id, vbuf->sequence, inst->sequence);
	return call_vop(inst, process_output, &vbuf->vb2_buf);
}

/*
 * Hand the first idle (unprocessed) capture buffer to the codec-specific
 * process_capture op. Returns -EINVAL when nothing is ready.
 */
int vpu_process_capture_buffer(struct vpu_inst *inst)
{
	struct v4l2_m2m_buffer *buf = NULL;
	struct vb2_v4l2_buffer *vbuf = NULL;

	if (!inst || !inst->fh.m2m_ctx)
		return -EINVAL;

	if (!vpu_check_ready(inst, inst->cap_format.type))
		return -EINVAL;

	v4l2_m2m_for_each_dst_buf(inst->fh.m2m_ctx, buf) {
		vbuf = &buf->vb;
		if (vpu_get_buffer_state(vbuf) == VPU_BUF_STATE_IDLE)
			break;
		vbuf = NULL;
	}
	if (!vbuf)
		return -EINVAL;

	return call_vop(inst, process_capture, &vbuf->vb2_buf);
}

/*
 * Peek at the next source buffer, consuming any codec-config buffers on the
 * way (they are removed and completed immediately). Returns NULL when the
 * queue is empty or its head buffer is still idle.
 */
struct vb2_v4l2_buffer *vpu_next_src_buf(struct vpu_inst *inst)
{
	struct vb2_v4l2_buffer *src_buf = NULL;

	if (!inst->fh.m2m_ctx)
		return NULL;

	src_buf = v4l2_m2m_next_src_buf(inst->fh.m2m_ctx);
	if (!src_buf || vpu_get_buffer_state(src_buf) == VPU_BUF_STATE_IDLE)
		return NULL;

	while (vpu_vb_is_codecconfig(src_buf)) {
		v4l2_m2m_src_buf_remove(inst->fh.m2m_ctx);
		vpu_set_buffer_state(src_buf, VPU_BUF_STATE_IDLE);
		v4l2_m2m_buf_done(src_buf, VB2_BUF_STATE_DONE);

		src_buf = v4l2_m2m_next_src_buf(inst->fh.m2m_ctx);
		if (!src_buf || vpu_get_buffer_state(src_buf) == VPU_BUF_STATE_IDLE)
			return NULL;
	}

	return src_buf;
}

/*
 * Complete up to @count processed source buffers. Decoded frames finish
 * DONE, anything else ERROR. Stops early when the queue runs out or an
 * idle (unprocessed) buffer is reached.
 */
void vpu_skip_frame(struct vpu_inst *inst, int count)
{
	struct vb2_v4l2_buffer *src_buf;
	enum vb2_buffer_state state;
	int i = 0;

	if (count <= 0 || !inst->fh.m2m_ctx)
		return;

	while (i < count) {
		src_buf = v4l2_m2m_src_buf_remove(inst->fh.m2m_ctx);
		if (!src_buf || vpu_get_buffer_state(src_buf) == VPU_BUF_STATE_IDLE)
			return;
		if (vpu_get_buffer_state(src_buf) == VPU_BUF_STATE_DECODED)
			state = VB2_BUF_STATE_DONE;
		else
			state = VB2_BUF_STATE_ERROR;
		i++;
		vpu_set_buffer_state(src_buf, VPU_BUF_STATE_IDLE);
		v4l2_m2m_buf_done(src_buf, state);
	}
}

/* Find a queued buffer on queue @type by its v4l2 sequence number. */
struct vb2_v4l2_buffer *vpu_find_buf_by_sequence(struct vpu_inst *inst, u32 type, u32 sequence)
{
	struct v4l2_m2m_buffer *buf = NULL;
	struct vb2_v4l2_buffer *vbuf = NULL;

	if (!inst || !inst->fh.m2m_ctx)
		return NULL;

	if (V4L2_TYPE_IS_OUTPUT(type)) {
v4l2_m2m_for_each_src_buf(inst->fh.m2m_ctx, buf) {
			vbuf = &buf->vb;
			if (vbuf->sequence == sequence)
				break;
			vbuf = NULL;
		}
	} else {
		v4l2_m2m_for_each_dst_buf(inst->fh.m2m_ctx, buf) {
			vbuf = &buf->vb;
			if (vbuf->sequence == sequence)
				break;
			vbuf = NULL;
		}
	}

	return vbuf;
}

/* Find a queued buffer on queue @type by its vb2 buffer index. */
struct vb2_v4l2_buffer *vpu_find_buf_by_idx(struct vpu_inst *inst, u32 type, u32 idx)
{
	struct v4l2_m2m_buffer *buf = NULL;
	struct vb2_v4l2_buffer *vbuf = NULL;

	if (!inst || !inst->fh.m2m_ctx)
		return NULL;

	if (V4L2_TYPE_IS_OUTPUT(type)) {
		v4l2_m2m_for_each_src_buf(inst->fh.m2m_ctx, buf) {
			vbuf = &buf->vb;
			if (vbuf->vb2_buf.index == idx)
				break;
			vbuf = NULL;
		}
	} else {
		v4l2_m2m_for_each_dst_buf(inst->fh.m2m_ctx, buf) {
			vbuf = &buf->vb;
			if (vbuf->vb2_buf.index == idx)
				break;
			vbuf = NULL;
		}
	}

	return vbuf;
}

/* Number of allocated buffers on the queue of @type, or -EINVAL. */
int vpu_get_num_buffers(struct vpu_inst *inst, u32 type)
{
	struct vb2_queue *q;

	if (!inst || !inst->fh.m2m_ctx)
		return -EINVAL;

	if (V4L2_TYPE_IS_OUTPUT(type))
		q = v4l2_m2m_get_src_vq(inst->fh.m2m_ctx);
	else
		q = v4l2_m2m_get_dst_vq(inst->fh.m2m_ctx);

	return vb2_get_num_buffers(q);
}

/*
 * Intentionally empty: buffers are pushed to the codec from buf_queue
 * (vpu_process_output_buffer/vpu_process_capture_buffer), not from the
 * m2m job scheduler.
 */
static void vpu_m2m_device_run(void *priv)
{
}

/* Nothing runs asynchronously here, so an abort only finishes the job. */
static void vpu_m2m_job_abort(void *priv)
{
	struct vpu_inst *inst = priv;
	struct v4l2_m2m_ctx *m2m_ctx = inst->fh.m2m_ctx;

	v4l2_m2m_job_finish(m2m_ctx->m2m_dev, m2m_ctx);
}

static const struct v4l2_m2m_ops vpu_m2m_ops = {
	.device_run = vpu_m2m_device_run,
	.job_abort = vpu_m2m_job_abort
};

/* vb2 queue_setup: validate or fill in buffer counts and per-plane sizes. */
static int vpu_vb2_queue_setup(struct vb2_queue *vq,
			       unsigned int *buf_count,
			       unsigned int *plane_count,
			       unsigned int psize[],
			       struct device *allocators[])
{
	struct vpu_inst *inst = vb2_get_drv_priv(vq);
	struct vpu_format *cur_fmt;
	int i;

	cur_fmt =
vpu_get_format(inst, vq->type);

	if (*plane_count) {
		/* Caller supplied a layout: only validate it against the format. */
		if (*plane_count != cur_fmt->mem_planes)
			return -EINVAL;
		for (i = 0; i < cur_fmt->mem_planes; i++) {
			if (psize[i] < vpu_get_fmt_plane_size(cur_fmt, i))
				return -EINVAL;
		}
		return 0;
	}

	if (V4L2_TYPE_IS_OUTPUT(vq->type))
		*buf_count = max_t(unsigned int, *buf_count, inst->min_buffer_out);
	else
		*buf_count = max_t(unsigned int, *buf_count, inst->min_buffer_cap);
	*plane_count = cur_fmt->mem_planes;
	for (i = 0; i < cur_fmt->mem_planes; i++)
		psize[i] = vpu_get_fmt_plane_size(cur_fmt, i);

	if (V4L2_TYPE_IS_OUTPUT(vq->type) && inst->state == VPU_CODEC_STATE_SEEK) {
		vpu_trace(inst->dev, "reinit when VIDIOC_REQBUFS(OUTPUT, 0)\n");
		call_void_vop(inst, release);
	}

	if (V4L2_TYPE_IS_CAPTURE(vq->type))
		call_void_vop(inst, reset_frame_store);

	return 0;
}

/*
 * vb2 buf_init: reset driver-private buffer state and, for capture buffers,
 * attach the buffer to the codec's frame store when that op exists.
 */
static int vpu_vb2_buf_init(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct vpu_vb2_buffer *vpu_buf = to_vpu_vb2_buffer(vbuf);
	struct vpu_inst *inst = vb2_get_drv_priv(vb->vb2_queue);

	vpu_buf->fs_id = -1;
	vpu_set_buffer_state(vbuf, VPU_BUF_STATE_IDLE);

	if (!inst->ops->attach_frame_store || V4L2_TYPE_IS_OUTPUT(vb->type))
		return 0;

	call_void_vop(inst, attach_frame_store, vb);
	return 0;
}

/* Output buffers are always treated as progressive. */
static int vpu_vb2_buf_out_validate(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);

	vbuf->field = V4L2_FIELD_NONE;

	return 0;
}

/*
 * vb2 buf_prepare: mark buffers whose planes are smaller than the
 * negotiated format as ERROR (the queue is not failed here).
 */
static int vpu_vb2_buf_prepare(struct vb2_buffer *vb)
{
	struct vpu_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct vpu_format *cur_fmt;
	u32 i;

	cur_fmt = vpu_get_format(inst, vb->type);
	for (i = 0; i < cur_fmt->mem_planes; i++) {
		if (vpu_get_vb_length(vb, i) < vpu_get_fmt_plane_size(cur_fmt, i))
{
			dev_dbg(inst->dev, "[%d] %s buf[%d] is invalid\n",
				inst->id, vpu_type_name(vb->type), vb->index);
			vpu_set_buffer_state(vbuf, VPU_BUF_STATE_ERROR);
		}
	}

	return 0;
}

/*
 * vb2 buf_finish: publish a finished capture buffer's average QP through
 * the AVERAGE_QP control, forward the LAST-flag as an EOS event, and tell
 * the codec when the done list has drained.
 */
static void vpu_vb2_buf_finish(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct vpu_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_queue *q = vb->vb2_queue;

	if (V4L2_TYPE_IS_CAPTURE(vb->type)) {
		struct vpu_vb2_buffer *vpu_buf = to_vpu_vb2_buffer(vbuf);
		struct v4l2_ctrl *ctrl = v4l2_ctrl_find(&inst->ctrl_handler,
							V4L2_CID_MPEG_VIDEO_AVERAGE_QP);

		if (ctrl)
			v4l2_ctrl_s_ctrl(ctrl, vpu_buf->average_qp);
	}

	if (vbuf->flags & V4L2_BUF_FLAG_LAST)
		vpu_notify_eos(inst);

	if (list_empty(&q->done_list))
		call_void_vop(inst, on_queue_empty, q->type);
}

/* Return every queued buffer of @type to vb2 with the given @state. */
static void vpu_vb2_buffers_return(struct vpu_inst *inst, unsigned int type,
				   enum vb2_buffer_state state)
{
	struct vb2_v4l2_buffer *buf;

	if (V4L2_TYPE_IS_OUTPUT(type)) {
		while ((buf = v4l2_m2m_src_buf_remove(inst->fh.m2m_ctx))) {
			vpu_set_buffer_state(buf, VPU_BUF_STATE_IDLE);
			v4l2_m2m_buf_done(buf, state);
		}
	} else {
		while ((buf = v4l2_m2m_dst_buf_remove(inst->fh.m2m_ctx))) {
			vpu_set_buffer_state(buf, VPU_BUF_STATE_IDLE);
			v4l2_m2m_buf_done(buf, state);
		}
	}
}

/*
 * vb2 start_streaming: register the instance with a VPU core, then start
 * the codec on this queue. On failure, queued buffers go back as QUEUED.
 */
static int vpu_vb2_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct vpu_inst *inst = vb2_get_drv_priv(q);
	struct vpu_format *fmt = vpu_get_format(inst, q->type);
	int ret;

	/* vpu_inst_register() is called with the instance lock dropped. */
	vpu_inst_unlock(inst);
	ret = vpu_inst_register(inst);
	vpu_inst_lock(inst);
	if (ret) {
		vpu_vb2_buffers_return(inst, q->type, VB2_BUF_STATE_QUEUED);
		return ret;
	}

	vpu_trace(inst->dev, "[%d] %s %c%c%c%c %dx%d %u(%u) %u(%u) %u(%u) %d\n",
		  inst->id, vpu_type_name(q->type),
		  fmt->pixfmt,
		  fmt->pixfmt >> 8,
		  fmt->pixfmt >> 16,
fmt->pixfmt >> 24,
		  fmt->width, fmt->height,
		  fmt->sizeimage[0], fmt->bytesperline[0],
		  fmt->sizeimage[1], fmt->bytesperline[1],
		  fmt->sizeimage[2], fmt->bytesperline[2],
		  vb2_get_num_buffers(q));
	vb2_clear_last_buffer_dequeued(q);
	ret = call_vop(inst, start, q->type);
	if (ret)
		vpu_vb2_buffers_return(inst, q->type, VB2_BUF_STATE_QUEUED);

	return ret;
}

/*
 * vb2 stop_streaming: stop the codec on this queue and fail all still
 * queued buffers; the output sequence counter restarts at zero.
 */
static void vpu_vb2_stop_streaming(struct vb2_queue *q)
{
	struct vpu_inst *inst = vb2_get_drv_priv(q);

	vpu_trace(inst->dev, "[%d] %s\n", inst->id, vpu_type_name(q->type));

	call_void_vop(inst, stop, q->type);
	vpu_vb2_buffers_return(inst, q->type, VB2_BUF_STATE_ERROR);
	if (V4L2_TYPE_IS_OUTPUT(q->type))
		inst->sequence = 0;
}

/*
 * vb2 buf_queue: assign output buffers a sequence number, queue them on
 * the m2m context and immediately kick processing on both directions.
 */
static void vpu_vb2_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct vpu_inst *inst = vb2_get_drv_priv(vb->vb2_queue);

	if (V4L2_TYPE_IS_OUTPUT(vb->type))
		vbuf->sequence = inst->sequence++;

	v4l2_m2m_buf_queue(inst->fh.m2m_ctx, vbuf);
	vpu_process_output_buffer(inst);
	vpu_process_capture_buffer(inst);
}

static const struct vb2_ops vpu_vb2_ops = {
	.queue_setup = vpu_vb2_queue_setup,
	.buf_init = vpu_vb2_buf_init,
	.buf_out_validate = vpu_vb2_buf_out_validate,
	.buf_prepare = vpu_vb2_buf_prepare,
	.buf_finish = vpu_vb2_buf_finish,
	.start_streaming = vpu_vb2_start_streaming,
	.stop_streaming = vpu_vb2_stop_streaming,
	.buf_queue = vpu_vb2_buf_queue,
};

/*
 * m2m queue init: configure the source (output) and destination (capture)
 * vb2 queues; both share the instance lock and vpu_vb2_ops.
 */
static int vpu_m2m_queue_init(void *priv, struct vb2_queue *src_vq, struct vb2_queue *dst_vq)
{
	struct vpu_inst *inst = priv;
	int ret;

	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
	inst->out_format.type = src_vq->type;
	src_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->ops = &vpu_vb2_ops;
	src_vq->mem_ops =
&vb2_dma_contig_memops;
	/* Decoder bitstream input uses vmalloc memory when stream mode is on. */
	if (inst->type == VPU_CORE_TYPE_DEC && inst->use_stream_buffer)
		src_vq->mem_ops = &vb2_vmalloc_memops;
	src_vq->drv_priv = inst;
	src_vq->buf_struct_size = sizeof(struct vpu_vb2_buffer);
	src_vq->min_queued_buffers = 1;
	src_vq->dev = inst->vpu->dev;
	src_vq->lock = &inst->lock;
	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	inst->cap_format.type = dst_vq->type;
	dst_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->ops = &vpu_vb2_ops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	/* Encoder bitstream output likewise uses vmalloc memory. */
	if (inst->type == VPU_CORE_TYPE_ENC && inst->use_stream_buffer)
		dst_vq->mem_ops = &vb2_vmalloc_memops;
	dst_vq->drv_priv = inst;
	dst_vq->buf_struct_size = sizeof(struct vpu_vb2_buffer);
	dst_vq->min_queued_buffers = 1;
	dst_vq->dev = inst->vpu->dev;
	dst_vq->lock = &inst->lock;
	ret = vb2_queue_init(dst_vq);
	if (ret) {
		vb2_queue_release(src_vq);
		return ret;
	}

	return 0;
}

/*
 * Final release of an instance (installed as inst->release in open).
 * Drops the core and device references, tears down the message workqueue,
 * frees controls and the lock, then invokes the codec-specific cleanup op.
 */
static int vpu_v4l2_release(struct vpu_inst *inst)
{
	vpu_trace(inst->vpu->dev, "%p\n", inst);

	vpu_release_core(inst->core);
	put_device(inst->dev);

	if (inst->workqueue) {
		cancel_work_sync(&inst->msg_work);
		destroy_workqueue(inst->workqueue);
		inst->workqueue = NULL;
	}

	v4l2_ctrl_handler_free(&inst->ctrl_handler);
	mutex_destroy(&inst->lock);

	call_void_vop(inst, cleanup);

	return 0;
}

/*
 * Open an encoder or decoder instance for @file: set up refcounting, the
 * v4l2 file handle, controls, the m2m context and the per-instance message
 * workqueue. On error the fh is unwound and the reference dropped.
 */
int vpu_v4l2_open(struct file *file, struct vpu_inst *inst)
{
	struct vpu_dev *vpu = video_drvdata(file);
	struct vpu_func *func;
	int ret = 0;

	if (!inst || !inst->ops)
		return -EINVAL;

	if (inst->type == VPU_CORE_TYPE_ENC)
		func = &vpu->encoder;
	else
		func = &vpu->decoder;

	atomic_set(&inst->ref_count, 0);
	atomic_long_set(&inst->last_response_cmd, 0);
vpu_inst_get(inst);
	inst->vpu = vpu;
	inst->core = vpu_request_core(vpu, inst->type);
	if (inst->core)
		inst->dev = get_device(inst->core->dev);
	mutex_init(&inst->lock);
	INIT_LIST_HEAD(&inst->cmd_q);
	inst->id = VPU_INST_NULL_ID;
	inst->release = vpu_v4l2_release;
	inst->pid = current->pid;
	inst->tgid = current->tgid;
	inst->min_buffer_cap = 2;
	inst->min_buffer_out = 2;
	v4l2_fh_init(&inst->fh, func->vfd);
	v4l2_fh_add(&inst->fh, file);

	ret = call_vop(inst, ctrl_init);
	if (ret)
		goto error;

	inst->fh.m2m_ctx = v4l2_m2m_ctx_init(func->m2m_dev, inst, vpu_m2m_queue_init);
	if (IS_ERR(inst->fh.m2m_ctx)) {
		dev_err(vpu->dev, "v4l2_m2m_ctx_init fail\n");
		ret = PTR_ERR(inst->fh.m2m_ctx);
		goto error;
	}

	inst->fh.ctrl_handler = &inst->ctrl_handler;
	inst->state = VPU_CODEC_STATE_DEINIT;
	/* Workqueue/kfifo setup failure is non-fatal; open still succeeds. */
	inst->workqueue = alloc_ordered_workqueue("vpu_inst", WQ_MEM_RECLAIM);
	if (inst->workqueue) {
		INIT_WORK(&inst->msg_work, vpu_inst_run_work);
		ret = kfifo_init(&inst->msg_fifo,
				 inst->msg_buffer,
				 rounddown_pow_of_two(sizeof(inst->msg_buffer)));
		if (ret) {
			destroy_workqueue(inst->workqueue);
			inst->workqueue = NULL;
		}
	}
	vpu_trace(vpu->dev, "tgid = %d, pid = %d, type = %s, inst = %p\n",
		  inst->tgid, inst->pid, vpu_core_type_desc(inst->type), inst);

	return 0;
error:
	v4l2_fh_del(&inst->fh, file);
	v4l2_fh_exit(&inst->fh);
	vpu_inst_put(inst);
	return ret;
}

/*
 * Release a file handle: tear down the m2m context and codec state under
 * the instance lock, then unregister and drop the instance reference.
 */
int vpu_v4l2_close(struct file *file)
{
	struct vpu_dev *vpu = video_drvdata(file);
	struct vpu_inst *inst = to_inst(file);

	vpu_trace(vpu->dev, "tgid = %d, pid = %d, inst = %p\n", inst->tgid, inst->pid, inst);

	vpu_inst_lock(inst);
	if (inst->fh.m2m_ctx) {
		v4l2_m2m_ctx_release(inst->fh.m2m_ctx);
		inst->fh.m2m_ctx = NULL;
	}
	call_void_vop(inst, release);
	vpu_inst_unlock(inst);

	v4l2_fh_del(&inst->fh, file);
v4l2_fh_exit(&inst->fh);

	vpu_inst_unregister(inst);
	vpu_inst_put(inst);

	return 0;
}

/*
 * Register the video device and m2m device for one function (encoder or
 * decoder). Idempotent: returns 0 immediately if func->vfd already exists.
 * On any failure everything allocated so far is rolled back.
 */
int vpu_add_func(struct vpu_dev *vpu, struct vpu_func *func)
{
	struct video_device *vfd;
	int ret;

	if (!vpu || !func)
		return -EINVAL;

	if (func->vfd)
		return 0;

	func->m2m_dev = v4l2_m2m_init(&vpu_m2m_ops);
	if (IS_ERR(func->m2m_dev)) {
		dev_err(vpu->dev, "v4l2_m2m_init fail\n");
		func->vfd = NULL;
		return PTR_ERR(func->m2m_dev);
	}

	vfd = video_device_alloc();
	if (!vfd) {
		v4l2_m2m_release(func->m2m_dev);
		dev_err(vpu->dev, "alloc vpu decoder video device fail\n");
		return -ENOMEM;
	}
	vfd->release = video_device_release;
	vfd->vfl_dir = VFL_DIR_M2M;
	vfd->v4l2_dev = &vpu->v4l2_dev;
	vfd->device_caps = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING;
	if (func->type == VPU_CORE_TYPE_ENC) {
		strscpy(vfd->name, "amphion-vpu-encoder", sizeof(vfd->name));
		vfd->fops = venc_get_fops();
		vfd->ioctl_ops = venc_get_ioctl_ops();
	} else {
		strscpy(vfd->name, "amphion-vpu-decoder", sizeof(vfd->name));
		vfd->fops = vdec_get_fops();
		vfd->ioctl_ops = vdec_get_ioctl_ops();
	}
	video_set_drvdata(vfd, vpu);

	ret = video_register_device(vfd, VFL_TYPE_VIDEO, -1);
	if (ret) {
		video_device_release(vfd);
		v4l2_m2m_release(func->m2m_dev);
		return ret;
	}
	func->vfd = vfd;

	ret = v4l2_m2m_register_media_controller(func->m2m_dev, func->vfd, func->function);
	if (ret) {
		v4l2_m2m_release(func->m2m_dev);
		func->m2m_dev = NULL;
		video_unregister_device(func->vfd);
		func->vfd = NULL;
		return ret;
	}

	return 0;
}

/* Undo vpu_add_func(): drop the media controller, m2m dev and video device. */
void vpu_remove_func(struct vpu_func *func)
{
	if (!func)
		return;

	if (func->m2m_dev) {
		v4l2_m2m_unregister_media_controller(func->m2m_dev);
		v4l2_m2m_release(func->m2m_dev);
		func->m2m_dev = NULL;
	}
	if (func->vfd) {
video_unregister_device(func->vfd);
		func->vfd = NULL;
	}
}