// SPDX-License-Identifier: GPL-2.0
/*
 * Hantro VPU codec driver
 *
 * Copyright (C) 2018 Collabora, Ltd.
 * Copyright (C) 2018 Rockchip Electronics Co., Ltd.
 *	Alpha Lin <Alpha.Lin@rock-chips.com>
 *	Jeffy Chen <jeffy.chen@rock-chips.com>
 *
 * Copyright 2018 Google LLC.
 *	Tomasz Figa <tfiga@chromium.org>
 *
 * Based on s5p-mfc driver by Samsung Electronics Co., Ltd.
 * Copyright (C) 2010-2011 Samsung Electronics Co., Ltd.
 */

#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/pm_runtime.h>
#include <linux/videodev2.h>
#include <linux/workqueue.h>
#include <media/v4l2-ctrls.h>
#include <media/v4l2-event.h>
#include <media/v4l2-mem2mem.h>

#include "hantro.h"
#include "hantro_hw.h"
#include "hantro_v4l2.h"

#define HANTRO_DEFAULT_BIT_DEPTH 8

static int hantro_set_fmt_out(struct hantro_ctx *ctx,
			      struct v4l2_pix_format_mplane *pix_mp,
			      bool need_postproc);
static int hantro_set_fmt_cap(struct hantro_ctx *ctx,
			      struct v4l2_pix_format_mplane *pix_mp);

static const struct hantro_fmt *
hantro_get_formats(const struct hantro_ctx *ctx, unsigned int *num_fmts, bool need_postproc)
{
	const struct hantro_fmt *formats;

	if (need_postproc) {
		*num_fmts = 0;
		return NULL;
	}

	if (ctx->is_encoder) {
		formats = ctx->dev->variant->enc_fmts;
		*num_fmts = ctx->dev->variant->num_enc_fmts;
	} else {
		formats = ctx->dev->variant->dec_fmts;
		*num_fmts = ctx->dev->variant->num_dec_fmts;
	}

	return formats;
}

static const struct hantro_fmt *
hantro_get_postproc_formats(const struct hantro_ctx *ctx,
			    unsigned int *num_fmts)
{
	struct hantro_dev *vpu = ctx->dev;

	if (ctx->is_encoder || !vpu->variant->postproc_fmts) {
		*num_fmts = 0;
		return NULL;
	}

	*num_fmts = ctx->dev->variant->num_postproc_fmts;
	return ctx->dev->variant->postproc_fmts;
}

int hantro_get_format_depth(u32 fourcc)
{
	switch (fourcc) {
	case V4L2_PIX_FMT_P010:
	case V4L2_PIX_FMT_P010_4L4:
	case V4L2_PIX_FMT_NV15:
	case V4L2_PIX_FMT_NV15_4L4:
		return 10;
	default:
		return 8;
	}
}

static bool
hantro_check_depth_match(const struct hantro_fmt *fmt, int bit_depth)
{
	int fmt_depth;

	if (!fmt->match_depth && !fmt->postprocessed)
		return true;

	/* 0 means default depth, which is 8 */
	if (!bit_depth)
		bit_depth = HANTRO_DEFAULT_BIT_DEPTH;

	fmt_depth = hantro_get_format_depth(fmt->fourcc);

	/*
	 * Allow only downconversion for postproc formats for now.
	 * It may be possible to relax that on some HW.
	 */
	if (!fmt->match_depth)
		return fmt_depth <= bit_depth;

	return fmt_depth == bit_depth;
}

static const struct hantro_fmt *
hantro_find_format(const struct hantro_ctx *ctx, u32 fourcc)
{
	const struct hantro_fmt *formats;
	unsigned int i, num_fmts;

	formats = hantro_get_formats(ctx, &num_fmts, HANTRO_AUTO_POSTPROC);
	for (i = 0; i < num_fmts; i++)
		if (formats[i].fourcc == fourcc)
			return &formats[i];

	formats = hantro_get_postproc_formats(ctx, &num_fmts);
	for (i = 0; i < num_fmts; i++)
		if (formats[i].fourcc == fourcc)
			return &formats[i];
	return NULL;
}

static int
hantro_set_reference_frames_format(struct hantro_ctx *ctx)
{
	const struct hantro_fmt *fmt;
	int dst_bit_depth = hantro_get_format_depth(ctx->vpu_dst_fmt->fourcc);

	fmt = hantro_get_default_fmt(ctx, false, dst_bit_depth, HANTRO_AUTO_POSTPROC);
	if (!fmt)
		return -EINVAL;

	ctx->ref_fmt.width = ctx->src_fmt.width;
	ctx->ref_fmt.height = ctx->src_fmt.height;

	v4l2_apply_frmsize_constraints(&ctx->ref_fmt.width, &ctx->ref_fmt.height, &fmt->frmsize);
	return v4l2_fill_pixfmt_mp(&ctx->ref_fmt, fmt->fourcc,
				   ctx->ref_fmt.width, ctx->ref_fmt.height);
}

const struct hantro_fmt *
hantro_get_default_fmt(const struct hantro_ctx *ctx, bool bitstream,
		       int bit_depth, bool need_postproc)
{
	const struct hantro_fmt *formats;
	unsigned int i, num_fmts;

	formats = hantro_get_formats(ctx, &num_fmts, need_postproc);
	for (i = 0; i < num_fmts; i++) {
		if (bitstream == (formats[i].codec_mode !=
				  HANTRO_MODE_NONE) &&
		    hantro_check_depth_match(&formats[i], bit_depth))
			return &formats[i];
	}

	formats = hantro_get_postproc_formats(ctx, &num_fmts);
	for (i = 0; i < num_fmts; i++) {
		if (bitstream == (formats[i].codec_mode !=
				  HANTRO_MODE_NONE) &&
		    hantro_check_depth_match(&formats[i], bit_depth))
			return &formats[i];
	}

	return NULL;
}

static int vidioc_querycap(struct file *file, void *priv,
			   struct v4l2_capability *cap)
{
	struct hantro_dev *vpu = video_drvdata(file);
	struct video_device *vdev = video_devdata(file);

	strscpy(cap->driver, vpu->dev->driver->name, sizeof(cap->driver));
	strscpy(cap->card, vdev->name, sizeof(cap->card));
	return 0;
}

static int vidioc_enum_framesizes(struct file *file, void *priv,
				  struct v4l2_frmsizeenum *fsize)
{
	struct hantro_ctx *ctx = fh_to_ctx(priv);
	const struct hantro_fmt *fmt;

	fmt = hantro_find_format(ctx, fsize->pixel_format);
	if (!fmt) {
		vpu_debug(0, "unsupported bitstream format (%08x)\n",
			  fsize->pixel_format);
		return -EINVAL;
	}

	/* For non-coded formats check if postprocessing scaling is possible */
	if (fmt->codec_mode == HANTRO_MODE_NONE) {
		if (hantro_needs_postproc(ctx, fmt))
			return hanto_postproc_enum_framesizes(ctx, fsize);
		else
			return -ENOTTY;
	} else if (fsize->index != 0) {
		vpu_debug(0, "invalid frame size index (expected 0, got %d)\n",
			  fsize->index);
		return -EINVAL;
	}

	fsize->type = V4L2_FRMSIZE_TYPE_STEPWISE;
	fsize->stepwise = fmt->frmsize;

	return 0;
}

static int vidioc_enum_fmt(struct file *file, void *priv,
			   struct v4l2_fmtdesc *f, bool capture)
{
	struct hantro_ctx *ctx = fh_to_ctx(priv);
	const struct hantro_fmt *fmt, *formats;
	unsigned int num_fmts, i, j = 0;
	bool skip_mode_none, enum_all_formats;
	u32 index = f->index & ~V4L2_FMTDESC_FLAG_ENUM_ALL;

	/*
	 * If the V4L2_FMTDESC_FLAG_ENUM_ALL flag is set, we want to enumerate
	 * all hardware-supported pixel formats.
	 */
	enum_all_formats = !!(f->index & V4L2_FMTDESC_FLAG_ENUM_ALL);
	f->index = index;

	/*
	 * When dealing with an encoder:
	 *  - on the capture side we want to filter out all MODE_NONE formats.
	 *  - on the output side we want to filter out all formats that are
	 *    not MODE_NONE.
	 * When dealing with a decoder:
	 *  - on the capture side we want to filter out all formats that are
	 *    not MODE_NONE.
	 *  - on the output side we want to filter out all MODE_NONE formats.
	 */
	skip_mode_none = capture == ctx->is_encoder;

	formats = hantro_get_formats(ctx, &num_fmts, HANTRO_AUTO_POSTPROC);
	for (i = 0; i < num_fmts; i++) {
		bool mode_none = formats[i].codec_mode == HANTRO_MODE_NONE;

		fmt = &formats[i];

		if (skip_mode_none == mode_none)
			continue;
		if (!hantro_check_depth_match(fmt, ctx->bit_depth) && !enum_all_formats)
			continue;
		if (j == index) {
			f->pixelformat = fmt->fourcc;
			return 0;
		}
		++j;
	}

	/*
	 * Enumerate post-processed formats. As per the specification,
	 * we enumerate these formats after natively decoded formats
	 * as a hint for applications on what's the preferred format.
	 */
	if (!capture)
		return -EINVAL;
	formats = hantro_get_postproc_formats(ctx, &num_fmts);
	for (i = 0; i < num_fmts; i++) {
		fmt = &formats[i];

		if (!hantro_check_depth_match(fmt, ctx->bit_depth) && !enum_all_formats)
			continue;
		if (j == index) {
			f->pixelformat = fmt->fourcc;
			return 0;
		}
		++j;
	}

	return -EINVAL;
}

static int vidioc_enum_fmt_vid_cap(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return vidioc_enum_fmt(file, priv, f, true);
}

static int vidioc_enum_fmt_vid_out(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return vidioc_enum_fmt(file, priv, f, false);
}

static int vidioc_g_fmt_out_mplane(struct file *file, void *priv,
				   struct v4l2_format *f)
{
	struct v4l2_pix_format_mplane *pix_mp = &f->fmt.pix_mp;
	struct hantro_ctx *ctx = fh_to_ctx(priv);

	vpu_debug(4, "f->type = %d\n", f->type);

	*pix_mp = ctx->src_fmt;

	return 0;
}

static int vidioc_g_fmt_cap_mplane(struct file *file, void *priv,
				   struct v4l2_format *f)
{
	struct v4l2_pix_format_mplane *pix_mp = &f->fmt.pix_mp;
	struct hantro_ctx *ctx = fh_to_ctx(priv);

	vpu_debug(4, "f->type = %d\n", f->type);

	*pix_mp = ctx->dst_fmt;

	return 0;
}

static int hantro_try_fmt(const struct hantro_ctx *ctx,
			  struct v4l2_pix_format_mplane *pix_mp,
			  enum v4l2_buf_type type)
{
	const struct hantro_fmt *fmt;
	const struct hantro_fmt *vpu_fmt;
	bool capture = V4L2_TYPE_IS_CAPTURE(type);
	bool coded;

	coded = capture == ctx->is_encoder;

	vpu_debug(4, "trying format %p4cc\n", &pix_mp->pixelformat);

	fmt = hantro_find_format(ctx, pix_mp->pixelformat);
	if (!fmt) {
		fmt = hantro_get_default_fmt(ctx, coded, HANTRO_DEFAULT_BIT_DEPTH, HANTRO_AUTO_POSTPROC);
		pix_mp->pixelformat = fmt->fourcc;
	}

	if (coded) {
		pix_mp->num_planes = 1;
		vpu_fmt = fmt;
	} else if (ctx->is_encoder) {
		vpu_fmt = hantro_find_format(ctx, ctx->dst_fmt.pixelformat);
	} else {
		/*
		 * Width/height on the CAPTURE end of a decoder are ignored and
		 * replaced by the OUTPUT ones.
		 */
		pix_mp->width = ctx->src_fmt.width;
		pix_mp->height = ctx->src_fmt.height;
		vpu_fmt = fmt;
	}

	pix_mp->field = V4L2_FIELD_NONE;

	v4l2_apply_frmsize_constraints(&pix_mp->width, &pix_mp->height,
				       &vpu_fmt->frmsize);

	if (!coded) {
		/* Fill remaining fields */
		v4l2_fill_pixfmt_mp(pix_mp, fmt->fourcc, pix_mp->width,
				    pix_mp->height);
		if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_H264_SLICE &&
		    !hantro_needs_postproc(ctx, fmt))
			pix_mp->plane_fmt[0].sizeimage +=
				hantro_h264_mv_size(pix_mp->width,
						    pix_mp->height);
		else if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_VP9_FRAME &&
			 !hantro_needs_postproc(ctx, fmt))
			pix_mp->plane_fmt[0].sizeimage +=
				hantro_vp9_mv_size(pix_mp->width,
						   pix_mp->height);
		else if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_HEVC_SLICE &&
			 !hantro_needs_postproc(ctx, fmt))
			pix_mp->plane_fmt[0].sizeimage +=
				hantro_hevc_mv_size(pix_mp->width,
						    pix_mp->height);
		else if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_AV1_FRAME &&
			 !hantro_needs_postproc(ctx, fmt))
			pix_mp->plane_fmt[0].sizeimage +=
				hantro_av1_mv_size(pix_mp->width,
						   pix_mp->height);
	} else if (!pix_mp->plane_fmt[0].sizeimage) {
		/*
		 * For coded formats the application can specify
		 * sizeimage. If the application passes a zero sizeimage,
		 * let's default to the maximum frame size.
		 */
		pix_mp->plane_fmt[0].sizeimage = fmt->header_size +
			pix_mp->width * pix_mp->height * fmt->max_depth;
	}

	return 0;
}

static int vidioc_try_fmt_cap_mplane(struct file *file, void *priv,
				     struct v4l2_format *f)
{
	return hantro_try_fmt(fh_to_ctx(priv), &f->fmt.pix_mp, f->type);
}

static int vidioc_try_fmt_out_mplane(struct file *file, void *priv,
				     struct v4l2_format *f)
{
	return hantro_try_fmt(fh_to_ctx(priv), &f->fmt.pix_mp, f->type);
}

static void
hantro_reset_fmt(struct v4l2_pix_format_mplane *fmt,
		 const struct hantro_fmt *vpu_fmt)
{
	memset(fmt, 0, sizeof(*fmt));

	fmt->pixelformat = vpu_fmt->fourcc;
	fmt->field = V4L2_FIELD_NONE;
	fmt->colorspace = V4L2_COLORSPACE_JPEG;
	fmt->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
	fmt->quantization = V4L2_QUANTIZATION_DEFAULT;
	fmt->xfer_func = V4L2_XFER_FUNC_DEFAULT;
}

static void
hantro_reset_encoded_fmt(struct hantro_ctx *ctx)
{
	const struct hantro_fmt *vpu_fmt;
	struct v4l2_pix_format_mplane fmt;

	vpu_fmt = hantro_get_default_fmt(ctx, true, HANTRO_DEFAULT_BIT_DEPTH, HANTRO_AUTO_POSTPROC);
	if (!vpu_fmt)
		return;

	hantro_reset_fmt(&fmt, vpu_fmt);
	fmt.width = vpu_fmt->frmsize.min_width;
	fmt.height = vpu_fmt->frmsize.min_height;
	if (ctx->is_encoder)
		hantro_set_fmt_cap(ctx, &fmt);
	else
		hantro_set_fmt_out(ctx, &fmt, HANTRO_AUTO_POSTPROC);
}

int
hantro_reset_raw_fmt(struct hantro_ctx *ctx, int bit_depth, bool need_postproc)
{
	const struct hantro_fmt *raw_vpu_fmt;
	struct v4l2_pix_format_mplane raw_fmt, *encoded_fmt;
	int ret;

	raw_vpu_fmt = hantro_get_default_fmt(ctx, false, bit_depth, need_postproc);
	if (!raw_vpu_fmt)
		return -EINVAL;

	if (ctx->is_encoder) {
		encoded_fmt = &ctx->dst_fmt;
		ctx->vpu_src_fmt = raw_vpu_fmt;
	} else {
		encoded_fmt = &ctx->src_fmt;
	}

	hantro_reset_fmt(&raw_fmt, raw_vpu_fmt);
	raw_fmt.width = encoded_fmt->width;
	raw_fmt.height = encoded_fmt->height;
	if (ctx->is_encoder)
		ret = hantro_set_fmt_out(ctx, &raw_fmt, need_postproc);
	else
		ret = hantro_set_fmt_cap(ctx, &raw_fmt);

	if (!ret) {
		ctx->bit_depth = bit_depth;
		ctx->need_postproc = need_postproc;
	}

	return ret;
}

void hantro_reset_fmts(struct hantro_ctx *ctx)
{
	hantro_reset_encoded_fmt(ctx);
	hantro_reset_raw_fmt(ctx, HANTRO_DEFAULT_BIT_DEPTH, HANTRO_AUTO_POSTPROC);
}

static void
hantro_update_requires_request(struct hantro_ctx *ctx, u32 fourcc)
{
	switch (fourcc) {
	case V4L2_PIX_FMT_JPEG:
		ctx->fh.m2m_ctx->out_q_ctx.q.requires_requests = false;
		break;
	case V4L2_PIX_FMT_MPEG2_SLICE:
	case V4L2_PIX_FMT_VP8_FRAME:
	case V4L2_PIX_FMT_H264_SLICE:
	case V4L2_PIX_FMT_HEVC_SLICE:
	case V4L2_PIX_FMT_VP9_FRAME:
		ctx->fh.m2m_ctx->out_q_ctx.q.requires_requests = true;
		break;
	default:
		break;
	}
}

static void
hantro_update_requires_hold_capture_buf(struct hantro_ctx *ctx, u32 fourcc)
{
	struct vb2_queue *vq;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
			     V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);

	switch (fourcc) {
	case V4L2_PIX_FMT_JPEG:
	case V4L2_PIX_FMT_MPEG2_SLICE:
	case V4L2_PIX_FMT_VP8_FRAME:
	case V4L2_PIX_FMT_HEVC_SLICE:
	case V4L2_PIX_FMT_VP9_FRAME:
		vq->subsystem_flags &= ~(VB2_V4L2_FL_SUPPORTS_M2M_HOLD_CAPTURE_BUF);
		break;
	case V4L2_PIX_FMT_H264_SLICE:
		vq->subsystem_flags |= VB2_V4L2_FL_SUPPORTS_M2M_HOLD_CAPTURE_BUF;
		break;
	default:
		break;
	}
}

static int hantro_set_fmt_out(struct hantro_ctx *ctx,
			      struct v4l2_pix_format_mplane *pix_mp,
			      bool need_postproc)
{
	struct vb2_queue *vq;
	int ret;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
			     V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
	ret = hantro_try_fmt(ctx, pix_mp, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
	if (ret)
		return ret;

	if (!ctx->is_encoder) {
		/*
		 * In order to support dynamic resolution change,
		 * the decoder admits a resolution change, as long
		 * as the pixelformat remains the same.
		 */
		if (vb2_is_streaming(vq) && pix_mp->pixelformat != ctx->src_fmt.pixelformat)
			return -EBUSY;
	} else {
		/*
		 * The encoder doesn't admit a format change if
		 * there are OUTPUT buffers allocated.
		 */
		if (vb2_is_busy(vq))
			return -EBUSY;
	}

	ctx->vpu_src_fmt = hantro_find_format(ctx, pix_mp->pixelformat);
	ctx->src_fmt = *pix_mp;

	/*
	 * Current raw format might have become invalid with newly
	 * selected codec, so reset it to default just to be safe and
	 * keep internal driver state sane. User is mandated to set
	 * the raw format again after we return, so we don't need
	 * anything smarter.
	 * Note that hantro_reset_raw_fmt() also propagates size
	 * changes to the raw format.
	 */
	if (!ctx->is_encoder)
		hantro_reset_raw_fmt(ctx,
				     hantro_get_format_depth(pix_mp->pixelformat),
				     need_postproc);

	/* Colorimetry information is always propagated. */
	ctx->dst_fmt.colorspace = pix_mp->colorspace;
	ctx->dst_fmt.ycbcr_enc = pix_mp->ycbcr_enc;
	ctx->dst_fmt.xfer_func = pix_mp->xfer_func;
	ctx->dst_fmt.quantization = pix_mp->quantization;

	hantro_update_requires_request(ctx, pix_mp->pixelformat);
	hantro_update_requires_hold_capture_buf(ctx, pix_mp->pixelformat);

	vpu_debug(0, "OUTPUT codec mode: %d\n", ctx->vpu_src_fmt->codec_mode);
	vpu_debug(0, "fmt - w: %d, h: %d\n",
		  pix_mp->width, pix_mp->height);
	return 0;
}

static int hantro_set_fmt_cap(struct hantro_ctx *ctx,
			      struct v4l2_pix_format_mplane *pix_mp)
{
	int ret;

	if (ctx->is_encoder) {
		struct vb2_queue *peer_vq;

		/*
		 * Since a format change on the CAPTURE queue will reset
		 * the OUTPUT queue, we can't allow doing so
		 * when the OUTPUT queue has buffers allocated.
		 */
		peer_vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
					  V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
		if (vb2_is_busy(peer_vq) &&
		    (pix_mp->pixelformat != ctx->dst_fmt.pixelformat ||
		     pix_mp->height != ctx->dst_fmt.height ||
		     pix_mp->width != ctx->dst_fmt.width))
			return -EBUSY;
	}

	ret = hantro_try_fmt(ctx, pix_mp, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
	if (ret)
		return ret;

	ctx->vpu_dst_fmt = hantro_find_format(ctx, pix_mp->pixelformat);
	ctx->dst_fmt = *pix_mp;
	ret = hantro_set_reference_frames_format(ctx);
	if (ret)
		return ret;

	/*
	 * Current raw format might have become invalid with newly
	 * selected codec, so reset it to default just to be safe and
	 * keep internal driver state sane. User is mandated to set
	 * the raw format again after we return, so we don't need
	 * anything smarter.
	 * Note that hantro_reset_raw_fmt() also propagates size
	 * changes to the raw format.
	 */
	if (ctx->is_encoder)
		hantro_reset_raw_fmt(ctx, HANTRO_DEFAULT_BIT_DEPTH, HANTRO_AUTO_POSTPROC);

	/* Colorimetry information is always propagated. */
	ctx->src_fmt.colorspace = pix_mp->colorspace;
	ctx->src_fmt.ycbcr_enc = pix_mp->ycbcr_enc;
	ctx->src_fmt.xfer_func = pix_mp->xfer_func;
	ctx->src_fmt.quantization = pix_mp->quantization;

	vpu_debug(0, "CAPTURE codec mode: %d\n", ctx->vpu_dst_fmt->codec_mode);
	vpu_debug(0, "fmt - w: %d, h: %d\n",
		  pix_mp->width, pix_mp->height);

	hantro_update_requires_request(ctx, pix_mp->pixelformat);

	return 0;
}

static int
vidioc_s_fmt_out_mplane(struct file *file, void *priv, struct v4l2_format *f)
{
	return hantro_set_fmt_out(fh_to_ctx(priv), &f->fmt.pix_mp, HANTRO_AUTO_POSTPROC);
}

static int
vidioc_s_fmt_cap_mplane(struct file *file, void *priv, struct v4l2_format *f)
{
	return hantro_set_fmt_cap(fh_to_ctx(priv), &f->fmt.pix_mp);
}

static int vidioc_g_selection(struct file *file, void *priv,
			      struct v4l2_selection *sel)
{
	struct hantro_ctx *ctx = fh_to_ctx(priv);

	/* Crop only supported on source. */
	if (!ctx->is_encoder ||
	    sel->type != V4L2_BUF_TYPE_VIDEO_OUTPUT)
		return -EINVAL;

	switch (sel->target) {
	case V4L2_SEL_TGT_CROP_DEFAULT:
	case V4L2_SEL_TGT_CROP_BOUNDS:
		sel->r.top = 0;
		sel->r.left = 0;
		sel->r.width = ctx->src_fmt.width;
		sel->r.height = ctx->src_fmt.height;
		break;
	case V4L2_SEL_TGT_CROP:
		sel->r.top = 0;
		sel->r.left = 0;
		sel->r.width = ctx->dst_fmt.width;
		sel->r.height = ctx->dst_fmt.height;
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int vidioc_s_selection(struct file *file, void *priv,
			      struct v4l2_selection *sel)
{
	struct hantro_ctx *ctx = fh_to_ctx(priv);
	struct v4l2_rect *rect = &sel->r;
	struct vb2_queue *vq;

	/* Crop only supported on source. */
	if (!ctx->is_encoder ||
	    sel->type != V4L2_BUF_TYPE_VIDEO_OUTPUT)
		return -EINVAL;

	/* Change not allowed if the queue is streaming. */
	vq = v4l2_m2m_get_src_vq(ctx->fh.m2m_ctx);
	if (vb2_is_streaming(vq))
		return -EBUSY;

	if (sel->target != V4L2_SEL_TGT_CROP)
		return -EINVAL;

	/*
	 * We do not support offsets, and we can crop only inside
	 * right-most or bottom-most macroblocks.
	 */
	if (rect->left != 0 || rect->top != 0 ||
	    round_up(rect->width, MB_DIM) != ctx->src_fmt.width ||
	    round_up(rect->height, MB_DIM) != ctx->src_fmt.height) {
		/* Default to full frame for incorrect settings. */
		rect->left = 0;
		rect->top = 0;
		rect->width = ctx->src_fmt.width;
		rect->height = ctx->src_fmt.height;
	} else {
		/* We support widths aligned to 4 pixels and arbitrary heights. */
		rect->width = round_up(rect->width, 4);
	}

	ctx->dst_fmt.width = rect->width;
	ctx->dst_fmt.height = rect->height;

	return 0;
}

static const struct v4l2_event hantro_eos_event = {
	.type = V4L2_EVENT_EOS
};

static int vidioc_encoder_cmd(struct file *file, void *priv,
			      struct v4l2_encoder_cmd *ec)
{
	struct hantro_ctx *ctx = fh_to_ctx(priv);
	int ret;

	ret = v4l2_m2m_ioctl_try_encoder_cmd(file, priv, ec);
	if (ret < 0)
		return ret;

	if (!vb2_is_streaming(v4l2_m2m_get_src_vq(ctx->fh.m2m_ctx)) ||
	    !vb2_is_streaming(v4l2_m2m_get_dst_vq(ctx->fh.m2m_ctx)))
		return 0;

	ret = v4l2_m2m_ioctl_encoder_cmd(file, priv, ec);
	if (ret < 0)
		return ret;

	if (ec->cmd == V4L2_ENC_CMD_STOP &&
	    v4l2_m2m_has_stopped(ctx->fh.m2m_ctx))
		v4l2_event_queue_fh(&ctx->fh, &hantro_eos_event);

	if (ec->cmd == V4L2_ENC_CMD_START)
		vb2_clear_last_buffer_dequeued(&ctx->fh.m2m_ctx->cap_q_ctx.q);

	return 0;
}

const struct v4l2_ioctl_ops hantro_ioctl_ops = {
	.vidioc_querycap = vidioc_querycap,
	.vidioc_enum_framesizes = vidioc_enum_framesizes,

	.vidioc_try_fmt_vid_cap_mplane = vidioc_try_fmt_cap_mplane,
	.vidioc_try_fmt_vid_out_mplane = vidioc_try_fmt_out_mplane,
	.vidioc_s_fmt_vid_out_mplane = vidioc_s_fmt_out_mplane,
	.vidioc_s_fmt_vid_cap_mplane = vidioc_s_fmt_cap_mplane,
	.vidioc_g_fmt_vid_out_mplane = vidioc_g_fmt_out_mplane,
	.vidioc_g_fmt_vid_cap_mplane = vidioc_g_fmt_cap_mplane,
	.vidioc_enum_fmt_vid_out = vidioc_enum_fmt_vid_out,
	.vidioc_enum_fmt_vid_cap = vidioc_enum_fmt_vid_cap,

	.vidioc_reqbufs = v4l2_m2m_ioctl_reqbufs,
	.vidioc_querybuf = v4l2_m2m_ioctl_querybuf,
	.vidioc_qbuf = v4l2_m2m_ioctl_qbuf,
	.vidioc_dqbuf = v4l2_m2m_ioctl_dqbuf,
	.vidioc_prepare_buf = v4l2_m2m_ioctl_prepare_buf,
	.vidioc_create_bufs = v4l2_m2m_ioctl_create_bufs,
	.vidioc_remove_bufs = v4l2_m2m_ioctl_remove_bufs,
	.vidioc_expbuf = v4l2_m2m_ioctl_expbuf,

	.vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
	.vidioc_unsubscribe_event = v4l2_event_unsubscribe,

	.vidioc_streamon = v4l2_m2m_ioctl_streamon,
	.vidioc_streamoff = v4l2_m2m_ioctl_streamoff,

	.vidioc_g_selection = vidioc_g_selection,
	.vidioc_s_selection = vidioc_s_selection,

	.vidioc_decoder_cmd = v4l2_m2m_ioctl_stateless_decoder_cmd,
	.vidioc_try_decoder_cmd = v4l2_m2m_ioctl_stateless_try_decoder_cmd,

	.vidioc_try_encoder_cmd = v4l2_m2m_ioctl_try_encoder_cmd,
	.vidioc_encoder_cmd = vidioc_encoder_cmd,
};

static int
hantro_queue_setup(struct vb2_queue *vq, unsigned int *num_buffers,
		   unsigned int *num_planes, unsigned int sizes[],
		   struct device *alloc_devs[])
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(vq);
	struct v4l2_pix_format_mplane *pixfmt;
	int i;

	switch (vq->type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
		pixfmt = &ctx->dst_fmt;
		break;
	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
		pixfmt = &ctx->src_fmt;
		break;
	default:
		vpu_err("invalid queue type: %d\n", vq->type);
		return -EINVAL;
	}

	if (*num_planes) {
		if (*num_planes != pixfmt->num_planes)
			return -EINVAL;
		for (i = 0; i < pixfmt->num_planes; ++i)
			if (sizes[i] < pixfmt->plane_fmt[i].sizeimage)
				return -EINVAL;
		return 0;
	}

	*num_planes = pixfmt->num_planes;
	for (i = 0; i < pixfmt->num_planes; ++i)
		sizes[i] = pixfmt->plane_fmt[i].sizeimage;
	return 0;
}

static int
hantro_buf_plane_check(struct vb2_buffer *vb,
		       struct v4l2_pix_format_mplane *pixfmt)
{
	unsigned int sz;
	int i;

	for (i = 0; i < pixfmt->num_planes; ++i) {
		sz = pixfmt->plane_fmt[i].sizeimage;
		vpu_debug(4, "plane %d size: %ld, sizeimage: %u\n",
			  i, vb2_plane_size(vb, i), sz);
		if (vb2_plane_size(vb, i) < sz) {
			vpu_err("plane %d is too small for output\n", i);
			return -EINVAL;
		}
	}
	return 0;
}

static int hantro_buf_prepare(struct vb2_buffer *vb)
{
	struct vb2_queue *vq = vb->vb2_queue;
	struct hantro_ctx *ctx = vb2_get_drv_priv(vq);
	struct v4l2_pix_format_mplane *pix_fmt;
	int ret;

	if (V4L2_TYPE_IS_OUTPUT(vq->type))
		pix_fmt = &ctx->src_fmt;
	else
		pix_fmt = &ctx->dst_fmt;
	ret = hantro_buf_plane_check(vb, pix_fmt);
	if (ret)
		return ret;
	/*
	 * Buffer's bytesused must be written by the driver for CAPTURE buffers.
	 * (For OUTPUT buffers, if userspace passes 0 bytesused, v4l2-core sets
	 * it to buffer length.)
	 */
	if (V4L2_TYPE_IS_CAPTURE(vq->type)) {
		if (ctx->is_encoder)
			vb2_set_plane_payload(vb, 0, 0);
		else
			vb2_set_plane_payload(vb, 0, pix_fmt->plane_fmt[0].sizeimage);
	}

	return 0;
}

static void hantro_buf_queue(struct vb2_buffer *vb)
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);

	if (V4L2_TYPE_IS_CAPTURE(vb->vb2_queue->type) &&
	    vb2_is_streaming(vb->vb2_queue) &&
	    v4l2_m2m_dst_buf_is_last(ctx->fh.m2m_ctx)) {
		unsigned int i;

		for (i = 0; i < vb->num_planes; i++)
			vb2_set_plane_payload(vb, i, 0);

		vbuf->field = V4L2_FIELD_NONE;
		vbuf->sequence = ctx->sequence_cap++;

		v4l2_m2m_last_buffer_done(ctx->fh.m2m_ctx, vbuf);
		v4l2_event_queue_fh(&ctx->fh, &hantro_eos_event);
		return;
	}

	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
}

static bool hantro_vq_is_coded(struct vb2_queue *q)
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(q);

	return ctx->is_encoder != V4L2_TYPE_IS_OUTPUT(q->type);
}

static int hantro_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(q);
	int ret = 0;

	v4l2_m2m_update_start_streaming_state(ctx->fh.m2m_ctx, q);

	if (V4L2_TYPE_IS_OUTPUT(q->type))
		ctx->sequence_out = 0;
	else
		ctx->sequence_cap = 0;

	if (hantro_vq_is_coded(q)) {
		enum hantro_codec_mode codec_mode;

		if (V4L2_TYPE_IS_OUTPUT(q->type))
			codec_mode = ctx->vpu_src_fmt->codec_mode;
		else
			codec_mode = ctx->vpu_dst_fmt->codec_mode;

		vpu_debug(4, "Codec mode = %d\n", codec_mode);
		ctx->codec_ops = &ctx->dev->variant->codec_ops[codec_mode];
		if (ctx->codec_ops->init) {
			ret = ctx->codec_ops->init(ctx);
			if (ret)
				return ret;
		}

		if (hantro_needs_postproc(ctx, ctx->vpu_dst_fmt)) {
			ret = hantro_postproc_init(ctx);
			if (ret)
				goto err_codec_exit;
		}
	}
	return ret;

err_codec_exit:
	if (ctx->codec_ops->exit)
		ctx->codec_ops->exit(ctx);
	return ret;
}

static void
hantro_return_bufs(struct vb2_queue *q,
		   struct vb2_v4l2_buffer *(*buf_remove)(struct v4l2_m2m_ctx *))
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(q);

	for (;;) {
		struct vb2_v4l2_buffer *vbuf;

		vbuf = buf_remove(ctx->fh.m2m_ctx);
		if (!vbuf)
			break;
		v4l2_ctrl_request_complete(vbuf->vb2_buf.req_obj.req,
					   &ctx->ctrl_handler);
		v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
	}
}

static void hantro_stop_streaming(struct vb2_queue *q)
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(q);

	if (hantro_vq_is_coded(q)) {
		hantro_postproc_free(ctx);
		if (ctx->codec_ops && ctx->codec_ops->exit)
			ctx->codec_ops->exit(ctx);
	}

	/*
	 * The mem2mem framework calls v4l2_m2m_cancel_job before
	 * .stop_streaming, so there isn't any job running and
	 * it is safe to return all the buffers.
	 */
	if (V4L2_TYPE_IS_OUTPUT(q->type))
		hantro_return_bufs(q, v4l2_m2m_src_buf_remove);
	else
		hantro_return_bufs(q, v4l2_m2m_dst_buf_remove);

	v4l2_m2m_update_stop_streaming_state(ctx->fh.m2m_ctx, q);

	if (V4L2_TYPE_IS_OUTPUT(q->type) &&
	    v4l2_m2m_has_stopped(ctx->fh.m2m_ctx))
		v4l2_event_queue_fh(&ctx->fh, &hantro_eos_event);
}

static void hantro_buf_request_complete(struct vb2_buffer *vb)
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

	v4l2_ctrl_request_complete(vb->req_obj.req, &ctx->ctrl_handler);
}

static int hantro_buf_out_validate(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);

	vbuf->field = V4L2_FIELD_NONE;
	return 0;
}

const struct vb2_ops hantro_queue_ops = {
	.queue_setup = hantro_queue_setup,
	.buf_prepare = hantro_buf_prepare,
	.buf_queue = hantro_buf_queue,
	.buf_out_validate = hantro_buf_out_validate,
	.buf_request_complete = hantro_buf_request_complete,
	.start_streaming = hantro_start_streaming,
	.stop_streaming = hantro_stop_streaming,
};