// SPDX-License-Identifier: GPL-2.0
/*
 * Hantro VPU codec driver
 *
 * Copyright (C) 2018 Collabora, Ltd.
 * Copyright (C) 2018 Rockchip Electronics Co., Ltd.
 *	Alpha Lin <Alpha.Lin@rock-chips.com>
 *	Jeffy Chen <jeffy.chen@rock-chips.com>
 *
 * Copyright 2018 Google LLC.
 *	Tomasz Figa <tfiga@chromium.org>
 *
 * Based on s5p-mfc driver by Samsung Electronics Co., Ltd.
 * Copyright (C) 2010-2011 Samsung Electronics Co., Ltd.
 */

#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/pm_runtime.h>
#include <linux/videodev2.h>
#include <linux/workqueue.h>
#include <media/v4l2-ctrls.h>
#include <media/v4l2-event.h>
#include <media/v4l2-mem2mem.h>

#include "hantro.h"
#include "hantro_hw.h"
#include "hantro_v4l2.h"

#define HANTRO_DEFAULT_BIT_DEPTH 8

static int hantro_set_fmt_out(struct hantro_ctx *ctx,
			      struct v4l2_pix_format_mplane *pix_mp,
			      bool need_postproc);
static int hantro_set_fmt_cap(struct hantro_ctx *ctx,
			      struct v4l2_pix_format_mplane *pix_mp);

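/*
 * Return the format table for this context: encoder formats for encoders,
 * native decoder formats otherwise. When a post-processed format is
 * explicitly requested there is no native table to offer, so report zero
 * formats.
 */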
static const struct hantro_fmt *
hantro_get_formats(const struct hantro_ctx *ctx, unsigned int *num_fmts, bool need_postproc)
{
	const struct hantro_fmt *formats;

	if (need_postproc) {
		*num_fmts = 0;
		return NULL;
	}

	if (ctx->is_encoder) {
		formats = ctx->dev->variant->enc_fmts;
		*num_fmts = ctx->dev->variant->num_enc_fmts;
	} else {
		formats = ctx->dev->variant->dec_fmts;
		*num_fmts = ctx->dev->variant->num_dec_fmts;
	}

	return formats;
}

static const struct hantro_fmt *
hantro_get_postproc_formats(const struct hantro_ctx *ctx,
			    unsigned int *num_fmts)
{
	struct hantro_dev *vpu = ctx->dev;

	if (ctx->is_encoder || !vpu->variant->postproc_fmts) {
		*num_fmts = 0;
		return NULL;
	}

	*num_fmts = ctx->dev->variant->num_postproc_fmts;
	return ctx->dev->variant->postproc_fmts;
}

int hantro_get_format_depth(u32 fourcc)
{
	switch (fourcc) {
	case V4L2_PIX_FMT_P010:
	case V4L2_PIX_FMT_P010_4L4:
	case V4L2_PIX_FMT_NV15_4L4:
		return 10;
	default:
		return 8;
	}
}

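/*
 * Check whether a format is usable for a stream of the given bit depth.
 * Formats that neither advertise match_depth nor are post-processed accept
 * any depth. Post-processed formats without match_depth only allow
 * downconversion (e.g. outputting a 10-bit stream as 8-bit); all others
 * require an exact match.
 */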
static bool
hantro_check_depth_match(const struct hantro_fmt *fmt, int bit_depth)
{
	int fmt_depth;

	if (!fmt->match_depth && !fmt->postprocessed)
		return true;

	/* 0 means default depth, which is 8 */
	if (!bit_depth)
		bit_depth = HANTRO_DEFAULT_BIT_DEPTH;

	fmt_depth = hantro_get_format_depth(fmt->fourcc);

	/*
	 * Allow only downconversion for postproc formats for now.
	 * It may be possible to relax that on some HW.
	 */
	if (!fmt->match_depth)
		return fmt_depth <= bit_depth;

	return fmt_depth == bit_depth;
}

static const struct hantro_fmt *
hantro_find_format(const struct hantro_ctx *ctx, u32 fourcc)
{
	const struct hantro_fmt *formats;
	unsigned int i, num_fmts;

	formats = hantro_get_formats(ctx, &num_fmts, HANTRO_AUTO_POSTPROC);
	for (i = 0; i < num_fmts; i++)
		if (formats[i].fourcc == fourcc)
			return &formats[i];

	formats = hantro_get_postproc_formats(ctx, &num_fmts);
	for (i = 0; i < num_fmts; i++)
		if (formats[i].fourcc == fourcc)
			return &formats[i];
	return NULL;
}

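/*
 * Pick the first format matching the requested kind (coded bitstream vs.
 * raw) and bit depth, searching the native format table first and the
 * post-processed formats as a fallback.
 */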
const struct hantro_fmt *
hantro_get_default_fmt(const struct hantro_ctx *ctx, bool bitstream,
		       int bit_depth, bool need_postproc)
{
	const struct hantro_fmt *formats;
	unsigned int i, num_fmts;

	formats = hantro_get_formats(ctx, &num_fmts, need_postproc);
	for (i = 0; i < num_fmts; i++) {
		if (bitstream == (formats[i].codec_mode !=
				  HANTRO_MODE_NONE) &&
		    hantro_check_depth_match(&formats[i], bit_depth))
			return &formats[i];
	}

	formats = hantro_get_postproc_formats(ctx, &num_fmts);
	for (i = 0; i < num_fmts; i++) {
		if (bitstream == (formats[i].codec_mode !=
				  HANTRO_MODE_NONE) &&
		    hantro_check_depth_match(&formats[i], bit_depth))
			return &formats[i];
	}

	return NULL;
}

static int vidioc_querycap(struct file *file, void *priv,
			   struct v4l2_capability *cap)
{
	struct hantro_dev *vpu = video_drvdata(file);
	struct video_device *vdev = video_devdata(file);

	strscpy(cap->driver, vpu->dev->driver->name, sizeof(cap->driver));
	strscpy(cap->card, vdev->name, sizeof(cap->card));
	return 0;
}

static int vidioc_enum_framesizes(struct file *file, void *priv,
				  struct v4l2_frmsizeenum *fsize)
{
	struct hantro_ctx *ctx = fh_to_ctx(priv);
	const struct hantro_fmt *fmt;

	fmt = hantro_find_format(ctx, fsize->pixel_format);
	if (!fmt) {
		vpu_debug(0, "unsupported bitstream format (%08x)\n",
			  fsize->pixel_format);
		return -EINVAL;
	}

	/* For non-coded formats check if postprocessing scaling is possible */
	if (fmt->codec_mode == HANTRO_MODE_NONE) {
		if (hantro_needs_postproc(ctx, fmt))
			return hanto_postproc_enum_framesizes(ctx, fsize);
		else
			return -ENOTTY;
	} else if (fsize->index != 0) {
		vpu_debug(0, "invalid frame size index (expected 0, got %d)\n",
			  fsize->index);
		return -EINVAL;
	}

	fsize->type = V4L2_FRMSIZE_TYPE_STEPWISE;
	fsize->stepwise = fmt->frmsize;

	return 0;
}

static int vidioc_enum_fmt(struct file *file, void *priv,
			   struct v4l2_fmtdesc *f, bool capture)
{
	struct hantro_ctx *ctx = fh_to_ctx(priv);
	const struct hantro_fmt *fmt, *formats;
	unsigned int num_fmts, i, j = 0;
	bool skip_mode_none;

	/*
	 * When dealing with an encoder:
	 *  - on the capture side we want to filter out all MODE_NONE formats.
	 *  - on the output side we want to filter out all formats that are
	 *    not MODE_NONE.
	 * When dealing with a decoder:
	 *  - on the capture side we want to filter out all formats that are
	 *    not MODE_NONE.
	 *  - on the output side we want to filter out all MODE_NONE formats.
	 */
	skip_mode_none = capture == ctx->is_encoder;

	formats = hantro_get_formats(ctx, &num_fmts, HANTRO_AUTO_POSTPROC);
	for (i = 0; i < num_fmts; i++) {
		bool mode_none = formats[i].codec_mode == HANTRO_MODE_NONE;
		fmt = &formats[i];

		if (skip_mode_none == mode_none)
			continue;
		if (!hantro_check_depth_match(fmt, ctx->bit_depth))
			continue;
		if (j == f->index) {
			f->pixelformat = fmt->fourcc;
			return 0;
		}
		++j;
	}

	/*
	 * Enumerate post-processed formats. As per the specification,
	 * we enumerate these formats after the natively decoded formats
	 * as a hint to applications about which format is preferred.
	 */
	if (!capture)
		return -EINVAL;
	formats = hantro_get_postproc_formats(ctx, &num_fmts);
	for (i = 0; i < num_fmts; i++) {
		fmt = &formats[i];

		if (!hantro_check_depth_match(fmt, ctx->bit_depth))
			continue;
		if (j == f->index) {
			f->pixelformat = fmt->fourcc;
			return 0;
		}
		++j;
	}

	return -EINVAL;
}

static int vidioc_enum_fmt_vid_cap(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return vidioc_enum_fmt(file, priv, f, true);
}

static int vidioc_enum_fmt_vid_out(struct file *file, void *priv,
				   struct v4l2_fmtdesc *f)
{
	return vidioc_enum_fmt(file, priv, f, false);
}

static int vidioc_g_fmt_out_mplane(struct file *file, void *priv,
				   struct v4l2_format *f)
{
	struct v4l2_pix_format_mplane *pix_mp = &f->fmt.pix_mp;
	struct hantro_ctx *ctx = fh_to_ctx(priv);

	vpu_debug(4, "f->type = %d\n", f->type);

	*pix_mp = ctx->src_fmt;

	return 0;
}

static int vidioc_g_fmt_cap_mplane(struct file *file, void *priv,
				   struct v4l2_format *f)
{
	struct v4l2_pix_format_mplane *pix_mp = &f->fmt.pix_mp;
	struct hantro_ctx *ctx = fh_to_ctx(priv);

	vpu_debug(4, "f->type = %d\n", f->type);

	*pix_mp = ctx->dst_fmt;

	return 0;
}

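/*
 * Adjust the requested format so it is something the hardware can actually
 * produce: clamp width/height to the codec's frame size limits, recompute
 * plane sizes for raw formats (reserving extra space for motion vector
 * buffers when decoding H.264/VP9/HEVC/AV1 without post-processing), and
 * default a zero sizeimage for coded formats to the worst-case frame size.
 */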
static int hantro_try_fmt(const struct hantro_ctx *ctx,
			  struct v4l2_pix_format_mplane *pix_mp,
			  enum v4l2_buf_type type)
{
	const struct hantro_fmt *fmt;
	const struct hantro_fmt *vpu_fmt;
	bool capture = V4L2_TYPE_IS_CAPTURE(type);
	bool coded;

	coded = capture == ctx->is_encoder;

	vpu_debug(4, "trying format %p4cc\n", &pix_mp->pixelformat);

	fmt = hantro_find_format(ctx, pix_mp->pixelformat);
	if (!fmt) {
		fmt = hantro_get_default_fmt(ctx, coded, HANTRO_DEFAULT_BIT_DEPTH, HANTRO_AUTO_POSTPROC);
		pix_mp->pixelformat = fmt->fourcc;
	}

	if (coded) {
		pix_mp->num_planes = 1;
		vpu_fmt = fmt;
	} else if (ctx->is_encoder) {
		vpu_fmt = hantro_find_format(ctx, ctx->dst_fmt.pixelformat);
	} else {
		/*
		 * Width/height on the CAPTURE end of a decoder are ignored and
		 * replaced by the OUTPUT ones.
		 */
		pix_mp->width = ctx->src_fmt.width;
		pix_mp->height = ctx->src_fmt.height;
		vpu_fmt = fmt;
	}

	pix_mp->field = V4L2_FIELD_NONE;

	v4l2_apply_frmsize_constraints(&pix_mp->width, &pix_mp->height,
				       &vpu_fmt->frmsize);

	if (!coded) {
		/* Fill remaining fields */
		v4l2_fill_pixfmt_mp(pix_mp, fmt->fourcc, pix_mp->width,
				    pix_mp->height);
		if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_H264_SLICE &&
		    !hantro_needs_postproc(ctx, fmt))
			pix_mp->plane_fmt[0].sizeimage +=
				hantro_h264_mv_size(pix_mp->width,
						    pix_mp->height);
		else if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_VP9_FRAME &&
			 !hantro_needs_postproc(ctx, fmt))
			pix_mp->plane_fmt[0].sizeimage +=
				hantro_vp9_mv_size(pix_mp->width,
						   pix_mp->height);
		else if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_HEVC_SLICE &&
			 !hantro_needs_postproc(ctx, fmt))
			pix_mp->plane_fmt[0].sizeimage +=
				hantro_hevc_mv_size(pix_mp->width,
						    pix_mp->height);
		else if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_AV1_FRAME &&
			 !hantro_needs_postproc(ctx, fmt))
			pix_mp->plane_fmt[0].sizeimage +=
				hantro_av1_mv_size(pix_mp->width,
						   pix_mp->height);
	} else if (!pix_mp->plane_fmt[0].sizeimage) {
		/*
		 * For coded formats the application can specify
		 * sizeimage. If the application passes a zero sizeimage,
		 * let's default to the maximum frame size.
		 */
		pix_mp->plane_fmt[0].sizeimage = fmt->header_size +
			pix_mp->width * pix_mp->height * fmt->max_depth;
	}

	return 0;
}

static int vidioc_try_fmt_cap_mplane(struct file *file, void *priv,
				     struct v4l2_format *f)
{
	return hantro_try_fmt(fh_to_ctx(priv), &f->fmt.pix_mp, f->type);
}

static int vidioc_try_fmt_out_mplane(struct file *file, void *priv,
				     struct v4l2_format *f)
{
	return hantro_try_fmt(fh_to_ctx(priv), &f->fmt.pix_mp, f->type);
}

static void
hantro_reset_fmt(struct v4l2_pix_format_mplane *fmt,
		 const struct hantro_fmt *vpu_fmt)
{
	memset(fmt, 0, sizeof(*fmt));

	fmt->pixelformat = vpu_fmt->fourcc;
	fmt->field = V4L2_FIELD_NONE;
	fmt->colorspace = V4L2_COLORSPACE_JPEG;
	fmt->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
	fmt->quantization = V4L2_QUANTIZATION_DEFAULT;
	fmt->xfer_func = V4L2_XFER_FUNC_DEFAULT;
}

static void
hantro_reset_encoded_fmt(struct hantro_ctx *ctx)
{
	const struct hantro_fmt *vpu_fmt;
	struct v4l2_pix_format_mplane fmt;

	vpu_fmt = hantro_get_default_fmt(ctx, true, HANTRO_DEFAULT_BIT_DEPTH, HANTRO_AUTO_POSTPROC);
	if (!vpu_fmt)
		return;

	hantro_reset_fmt(&fmt, vpu_fmt);
	fmt.width = vpu_fmt->frmsize.min_width;
	fmt.height = vpu_fmt->frmsize.min_height;
	if (ctx->is_encoder)
		hantro_set_fmt_cap(ctx, &fmt);
	else
		hantro_set_fmt_out(ctx, &fmt, HANTRO_AUTO_POSTPROC);
}

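/*
 * Reset the raw (non-coded) format to the default for the given bit depth,
 * keeping the width/height of the currently selected coded format. The bit
 * depth and post-processing choice are only committed if the new format is
 * accepted.
 */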
int
hantro_reset_raw_fmt(struct hantro_ctx *ctx, int bit_depth, bool need_postproc)
{
	const struct hantro_fmt *raw_vpu_fmt;
	struct v4l2_pix_format_mplane raw_fmt, *encoded_fmt;
	int ret;

	raw_vpu_fmt = hantro_get_default_fmt(ctx, false, bit_depth, need_postproc);
	if (!raw_vpu_fmt)
		return -EINVAL;

	if (ctx->is_encoder) {
		encoded_fmt = &ctx->dst_fmt;
		ctx->vpu_src_fmt = raw_vpu_fmt;
	} else {
		encoded_fmt = &ctx->src_fmt;
	}

	hantro_reset_fmt(&raw_fmt, raw_vpu_fmt);
	raw_fmt.width = encoded_fmt->width;
	raw_fmt.height = encoded_fmt->height;
	if (ctx->is_encoder)
		ret = hantro_set_fmt_out(ctx, &raw_fmt, need_postproc);
	else
		ret = hantro_set_fmt_cap(ctx, &raw_fmt);

	if (!ret) {
		ctx->bit_depth = bit_depth;
		ctx->need_postproc = need_postproc;
	}

	return ret;
}

void hantro_reset_fmts(struct hantro_ctx *ctx)
{
	hantro_reset_encoded_fmt(ctx);
	hantro_reset_raw_fmt(ctx, HANTRO_DEFAULT_BIT_DEPTH, HANTRO_AUTO_POSTPROC);
}

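/*
 * Stateless decoding relies on per-frame controls, so all the stateless
 * codecs require their OUTPUT buffers to be queued through media requests;
 * JPEG encoding does not use requests at all.
 */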
static void
hantro_update_requires_request(struct hantro_ctx *ctx, u32 fourcc)
{
	switch (fourcc) {
	case V4L2_PIX_FMT_JPEG:
		ctx->fh.m2m_ctx->out_q_ctx.q.requires_requests = false;
		break;
	case V4L2_PIX_FMT_MPEG2_SLICE:
	case V4L2_PIX_FMT_VP8_FRAME:
	case V4L2_PIX_FMT_H264_SLICE:
	case V4L2_PIX_FMT_HEVC_SLICE:
	case V4L2_PIX_FMT_VP9_FRAME:
		ctx->fh.m2m_ctx->out_q_ctx.q.requires_requests = true;
		break;
	default:
		break;
	}
}

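/*
 * Only H.264 slice decoding supports holding the CAPTURE buffer across
 * OUTPUT buffers (VB2_V4L2_FL_SUPPORTS_M2M_HOLD_CAPTURE_BUF), so that all
 * slices of a frame can be decoded into one destination buffer; the flag
 * is cleared for every other codec.
 */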
static void
hantro_update_requires_hold_capture_buf(struct hantro_ctx *ctx, u32 fourcc)
{
	struct vb2_queue *vq;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
			     V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);

	switch (fourcc) {
	case V4L2_PIX_FMT_JPEG:
	case V4L2_PIX_FMT_MPEG2_SLICE:
	case V4L2_PIX_FMT_VP8_FRAME:
	case V4L2_PIX_FMT_HEVC_SLICE:
	case V4L2_PIX_FMT_VP9_FRAME:
		vq->subsystem_flags &= ~(VB2_V4L2_FL_SUPPORTS_M2M_HOLD_CAPTURE_BUF);
		break;
	case V4L2_PIX_FMT_H264_SLICE:
		vq->subsystem_flags |= VB2_V4L2_FL_SUPPORTS_M2M_HOLD_CAPTURE_BUF;
		break;
	default:
		break;
	}
}

static int hantro_set_fmt_out(struct hantro_ctx *ctx,
			      struct v4l2_pix_format_mplane *pix_mp,
			      bool need_postproc)
{
	struct vb2_queue *vq;
	int ret;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
			     V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
	ret = hantro_try_fmt(ctx, pix_mp, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
	if (ret)
		return ret;

	if (!ctx->is_encoder) {
		/*
		 * In order to support dynamic resolution change,
		 * the decoder admits a resolution change, as long
		 * as the pixelformat remains the same.
		 */
		if (vb2_is_streaming(vq) && pix_mp->pixelformat != ctx->src_fmt.pixelformat)
			return -EBUSY;
	} else {
		/*
		 * The encoder doesn't admit a format change if
		 * there are OUTPUT buffers allocated.
		 */
		if (vb2_is_busy(vq))
			return -EBUSY;
	}

	ctx->vpu_src_fmt = hantro_find_format(ctx, pix_mp->pixelformat);
	ctx->src_fmt = *pix_mp;

	/*
	 * The current raw format might have become invalid with the newly
	 * selected codec, so reset it to the default just to be safe and
	 * keep the internal driver state sane. The user is required to set
	 * the raw format again after we return, so we don't need anything
	 * smarter.
	 * Note that hantro_reset_raw_fmt() also propagates size
	 * changes to the raw format.
	 */
	if (!ctx->is_encoder)
		hantro_reset_raw_fmt(ctx,
				     hantro_get_format_depth(pix_mp->pixelformat),
				     need_postproc);

	/* Colorimetry information is always propagated. */
	ctx->dst_fmt.colorspace = pix_mp->colorspace;
	ctx->dst_fmt.ycbcr_enc = pix_mp->ycbcr_enc;
	ctx->dst_fmt.xfer_func = pix_mp->xfer_func;
	ctx->dst_fmt.quantization = pix_mp->quantization;

	hantro_update_requires_request(ctx, pix_mp->pixelformat);
	hantro_update_requires_hold_capture_buf(ctx, pix_mp->pixelformat);

	vpu_debug(0, "OUTPUT codec mode: %d\n", ctx->vpu_src_fmt->codec_mode);
	vpu_debug(0, "fmt - w: %d, h: %d\n",
		  pix_mp->width, pix_mp->height);
	return 0;
}

static int hantro_set_fmt_cap(struct hantro_ctx *ctx,
			      struct v4l2_pix_format_mplane *pix_mp)
{
	int ret;

	if (ctx->is_encoder) {
		struct vb2_queue *peer_vq;

		/*
		 * Since format change on the CAPTURE queue will reset
		 * the OUTPUT queue, we can't allow doing so
		 * when the OUTPUT queue has buffers allocated.
		 */
		peer_vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
					  V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
		if (vb2_is_busy(peer_vq) &&
		    (pix_mp->pixelformat != ctx->dst_fmt.pixelformat ||
		     pix_mp->height != ctx->dst_fmt.height ||
		     pix_mp->width != ctx->dst_fmt.width))
			return -EBUSY;
	}

	ret = hantro_try_fmt(ctx, pix_mp, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
	if (ret)
		return ret;

	ctx->vpu_dst_fmt = hantro_find_format(ctx, pix_mp->pixelformat);
	ctx->dst_fmt = *pix_mp;

	/*
	 * The current raw format might have become invalid with the newly
	 * selected codec, so reset it to the default just to be safe and
	 * keep the internal driver state sane. The user is required to set
	 * the raw format again after we return, so we don't need anything
	 * smarter.
	 * Note that hantro_reset_raw_fmt() also propagates size
	 * changes to the raw format.
	 */
	if (ctx->is_encoder)
		hantro_reset_raw_fmt(ctx, HANTRO_DEFAULT_BIT_DEPTH, HANTRO_AUTO_POSTPROC);

	/* Colorimetry information is always propagated. */
	ctx->src_fmt.colorspace = pix_mp->colorspace;
	ctx->src_fmt.ycbcr_enc = pix_mp->ycbcr_enc;
	ctx->src_fmt.xfer_func = pix_mp->xfer_func;
	ctx->src_fmt.quantization = pix_mp->quantization;

	vpu_debug(0, "CAPTURE codec mode: %d\n", ctx->vpu_dst_fmt->codec_mode);
	vpu_debug(0, "fmt - w: %d, h: %d\n",
		  pix_mp->width, pix_mp->height);

	hantro_update_requires_request(ctx, pix_mp->pixelformat);

	return 0;
}

static int
vidioc_s_fmt_out_mplane(struct file *file, void *priv, struct v4l2_format *f)
{
	return hantro_set_fmt_out(fh_to_ctx(priv), &f->fmt.pix_mp, HANTRO_AUTO_POSTPROC);
}

static int
vidioc_s_fmt_cap_mplane(struct file *file, void *priv, struct v4l2_format *f)
{
	return hantro_set_fmt_cap(fh_to_ctx(priv), &f->fmt.pix_mp);
}

static int vidioc_g_selection(struct file *file, void *priv,
			      struct v4l2_selection *sel)
{
	struct hantro_ctx *ctx = fh_to_ctx(priv);

	/* Crop only supported on source. */
	if (!ctx->is_encoder ||
	    sel->type != V4L2_BUF_TYPE_VIDEO_OUTPUT)
		return -EINVAL;

	switch (sel->target) {
	case V4L2_SEL_TGT_CROP_DEFAULT:
	case V4L2_SEL_TGT_CROP_BOUNDS:
		sel->r.top = 0;
		sel->r.left = 0;
		sel->r.width = ctx->src_fmt.width;
		sel->r.height = ctx->src_fmt.height;
		break;
	case V4L2_SEL_TGT_CROP:
		sel->r.top = 0;
		sel->r.left = 0;
		sel->r.width = ctx->dst_fmt.width;
		sel->r.height = ctx->dst_fmt.height;
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int vidioc_s_selection(struct file *file, void *priv,
			      struct v4l2_selection *sel)
{
	struct hantro_ctx *ctx = fh_to_ctx(priv);
	struct v4l2_rect *rect = &sel->r;
	struct vb2_queue *vq;

	/* Crop only supported on source. */
	if (!ctx->is_encoder ||
	    sel->type != V4L2_BUF_TYPE_VIDEO_OUTPUT)
		return -EINVAL;

	/* Change not allowed if the queue is streaming. */
	vq = v4l2_m2m_get_src_vq(ctx->fh.m2m_ctx);
	if (vb2_is_streaming(vq))
		return -EBUSY;

	if (sel->target != V4L2_SEL_TGT_CROP)
		return -EINVAL;

	/*
	 * We do not support offsets, and we can crop only inside
	 * right-most or bottom-most macroblocks.
	 */
	if (rect->left != 0 || rect->top != 0 ||
	    round_up(rect->width, MB_DIM) != ctx->src_fmt.width ||
	    round_up(rect->height, MB_DIM) != ctx->src_fmt.height) {
		/* Default to full frame for incorrect settings. */
		rect->left = 0;
		rect->top = 0;
		rect->width = ctx->src_fmt.width;
		rect->height = ctx->src_fmt.height;
	} else {
		/* We support widths aligned to 4 pixels and arbitrary heights. */
		rect->width = round_up(rect->width, 4);
	}

	ctx->dst_fmt.width = rect->width;
	ctx->dst_fmt.height = rect->height;

	return 0;
}

703
704 static const struct v4l2_event hantro_eos_event = {
705 .type = V4L2_EVENT_EOS
706 };
707
vidioc_encoder_cmd(struct file * file,void * priv,struct v4l2_encoder_cmd * ec)708 static int vidioc_encoder_cmd(struct file *file, void *priv,
709 struct v4l2_encoder_cmd *ec)
710 {
711 struct hantro_ctx *ctx = fh_to_ctx(priv);
712 int ret;
713
714 ret = v4l2_m2m_ioctl_try_encoder_cmd(file, priv, ec);
715 if (ret < 0)
716 return ret;
717
718 if (!vb2_is_streaming(v4l2_m2m_get_src_vq(ctx->fh.m2m_ctx)) ||
719 !vb2_is_streaming(v4l2_m2m_get_dst_vq(ctx->fh.m2m_ctx)))
720 return 0;
721
722 ret = v4l2_m2m_ioctl_encoder_cmd(file, priv, ec);
723 if (ret < 0)
724 return ret;
725
726 if (ec->cmd == V4L2_ENC_CMD_STOP &&
727 v4l2_m2m_has_stopped(ctx->fh.m2m_ctx))
728 v4l2_event_queue_fh(&ctx->fh, &hantro_eos_event);
729
730 if (ec->cmd == V4L2_ENC_CMD_START)
731 vb2_clear_last_buffer_dequeued(&ctx->fh.m2m_ctx->cap_q_ctx.q);
732
733 return 0;
734 }
735
736 const struct v4l2_ioctl_ops hantro_ioctl_ops = {
737 .vidioc_querycap = vidioc_querycap,
738 .vidioc_enum_framesizes = vidioc_enum_framesizes,
739
740 .vidioc_try_fmt_vid_cap_mplane = vidioc_try_fmt_cap_mplane,
741 .vidioc_try_fmt_vid_out_mplane = vidioc_try_fmt_out_mplane,
742 .vidioc_s_fmt_vid_out_mplane = vidioc_s_fmt_out_mplane,
743 .vidioc_s_fmt_vid_cap_mplane = vidioc_s_fmt_cap_mplane,
744 .vidioc_g_fmt_vid_out_mplane = vidioc_g_fmt_out_mplane,
745 .vidioc_g_fmt_vid_cap_mplane = vidioc_g_fmt_cap_mplane,
746 .vidioc_enum_fmt_vid_out = vidioc_enum_fmt_vid_out,
747 .vidioc_enum_fmt_vid_cap = vidioc_enum_fmt_vid_cap,
748
749 .vidioc_reqbufs = v4l2_m2m_ioctl_reqbufs,
750 .vidioc_querybuf = v4l2_m2m_ioctl_querybuf,
751 .vidioc_qbuf = v4l2_m2m_ioctl_qbuf,
752 .vidioc_dqbuf = v4l2_m2m_ioctl_dqbuf,
753 .vidioc_prepare_buf = v4l2_m2m_ioctl_prepare_buf,
754 .vidioc_create_bufs = v4l2_m2m_ioctl_create_bufs,
755 .vidioc_remove_bufs = v4l2_m2m_ioctl_remove_bufs,
756 .vidioc_expbuf = v4l2_m2m_ioctl_expbuf,
757
758 .vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
759 .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
760
761 .vidioc_streamon = v4l2_m2m_ioctl_streamon,
762 .vidioc_streamoff = v4l2_m2m_ioctl_streamoff,
763
764 .vidioc_g_selection = vidioc_g_selection,
765 .vidioc_s_selection = vidioc_s_selection,
766
767 .vidioc_decoder_cmd = v4l2_m2m_ioctl_stateless_decoder_cmd,
768 .vidioc_try_decoder_cmd = v4l2_m2m_ioctl_stateless_try_decoder_cmd,
769
770 .vidioc_try_encoder_cmd = v4l2_m2m_ioctl_try_encoder_cmd,
771 .vidioc_encoder_cmd = vidioc_encoder_cmd,
772 };
773
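/*
 * Report the plane count and minimum plane sizes for the format currently
 * set on the queue; if the caller already requested a specific plane layout
 * (VIDIOC_CREATE_BUFS), only validate that it is large enough.
 */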
static int
hantro_queue_setup(struct vb2_queue *vq, unsigned int *num_buffers,
		   unsigned int *num_planes, unsigned int sizes[],
		   struct device *alloc_devs[])
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(vq);
	struct v4l2_pix_format_mplane *pixfmt;
	int i;

	switch (vq->type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
		pixfmt = &ctx->dst_fmt;
		break;
	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
		pixfmt = &ctx->src_fmt;
		break;
	default:
		vpu_err("invalid queue type: %d\n", vq->type);
		return -EINVAL;
	}

	if (*num_planes) {
		if (*num_planes != pixfmt->num_planes)
			return -EINVAL;
		for (i = 0; i < pixfmt->num_planes; ++i)
			if (sizes[i] < pixfmt->plane_fmt[i].sizeimage)
				return -EINVAL;
		return 0;
	}

	*num_planes = pixfmt->num_planes;
	for (i = 0; i < pixfmt->num_planes; ++i)
		sizes[i] = pixfmt->plane_fmt[i].sizeimage;
	return 0;
}

static int
hantro_buf_plane_check(struct vb2_buffer *vb,
		       struct v4l2_pix_format_mplane *pixfmt)
{
	unsigned int sz;
	int i;

	for (i = 0; i < pixfmt->num_planes; ++i) {
		sz = pixfmt->plane_fmt[i].sizeimage;
		vpu_debug(4, "plane %d size: %ld, sizeimage: %u\n",
			  i, vb2_plane_size(vb, i), sz);
		if (vb2_plane_size(vb, i) < sz) {
			vpu_err("plane %d is too small for output\n", i);
			return -EINVAL;
		}
	}
	return 0;
}

static int hantro_buf_prepare(struct vb2_buffer *vb)
{
	struct vb2_queue *vq = vb->vb2_queue;
	struct hantro_ctx *ctx = vb2_get_drv_priv(vq);
	struct v4l2_pix_format_mplane *pix_fmt;
	int ret;

	if (V4L2_TYPE_IS_OUTPUT(vq->type))
		pix_fmt = &ctx->src_fmt;
	else
		pix_fmt = &ctx->dst_fmt;
	ret = hantro_buf_plane_check(vb, pix_fmt);
	if (ret)
		return ret;
	/*
	 * Buffer's bytesused must be written by driver for CAPTURE buffers.
	 * (for OUTPUT buffers, if userspace passes 0 bytesused, v4l2-core sets
	 * it to buffer length).
	 */
	if (V4L2_TYPE_IS_CAPTURE(vq->type)) {
		if (ctx->is_encoder)
			vb2_set_plane_payload(vb, 0, 0);
		else
			vb2_set_plane_payload(vb, 0, pix_fmt->plane_fmt[0].sizeimage);
	}

	return 0;
}

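/*
 * When a drain is in progress and the next CAPTURE buffer has to carry the
 * LAST flag, complete it right away with zero payload and signal the EOS
 * event instead of queuing it; otherwise hand the buffer over to the
 * mem2mem framework as usual.
 */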
static void hantro_buf_queue(struct vb2_buffer *vb)
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);

	if (V4L2_TYPE_IS_CAPTURE(vb->vb2_queue->type) &&
	    vb2_is_streaming(vb->vb2_queue) &&
	    v4l2_m2m_dst_buf_is_last(ctx->fh.m2m_ctx)) {
		unsigned int i;

		for (i = 0; i < vb->num_planes; i++)
			vb2_set_plane_payload(vb, i, 0);

		vbuf->field = V4L2_FIELD_NONE;
		vbuf->sequence = ctx->sequence_cap++;

		v4l2_m2m_last_buffer_done(ctx->fh.m2m_ctx, vbuf);
		v4l2_event_queue_fh(&ctx->fh, &hantro_eos_event);
		return;
	}

	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
}

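/*
 * The coded (bitstream) queue is the CAPTURE queue for an encoder and the
 * OUTPUT queue for a decoder.
 */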
static bool hantro_vq_is_coded(struct vb2_queue *q)
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(q);

	return ctx->is_encoder != V4L2_TYPE_IS_OUTPUT(q->type);
}

static int hantro_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(q);
	int ret = 0;

	v4l2_m2m_update_start_streaming_state(ctx->fh.m2m_ctx, q);

	if (V4L2_TYPE_IS_OUTPUT(q->type))
		ctx->sequence_out = 0;
	else
		ctx->sequence_cap = 0;

	if (hantro_vq_is_coded(q)) {
		enum hantro_codec_mode codec_mode;

		if (V4L2_TYPE_IS_OUTPUT(q->type))
			codec_mode = ctx->vpu_src_fmt->codec_mode;
		else
			codec_mode = ctx->vpu_dst_fmt->codec_mode;

		vpu_debug(4, "Codec mode = %d\n", codec_mode);
		ctx->codec_ops = &ctx->dev->variant->codec_ops[codec_mode];
		if (ctx->codec_ops->init) {
			ret = ctx->codec_ops->init(ctx);
			if (ret)
				return ret;
		}

		if (hantro_needs_postproc(ctx, ctx->vpu_dst_fmt)) {
			ret = hantro_postproc_init(ctx);
			if (ret)
				goto err_codec_exit;
		}
	}
	return ret;

err_codec_exit:
	if (ctx->codec_ops->exit)
		ctx->codec_ops->exit(ctx);
	return ret;
}

static void
hantro_return_bufs(struct vb2_queue *q,
		   struct vb2_v4l2_buffer *(*buf_remove)(struct v4l2_m2m_ctx *))
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(q);

	for (;;) {
		struct vb2_v4l2_buffer *vbuf;

		vbuf = buf_remove(ctx->fh.m2m_ctx);
		if (!vbuf)
			break;
		v4l2_ctrl_request_complete(vbuf->vb2_buf.req_obj.req,
					   &ctx->ctrl_handler);
		v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
	}
}

static void hantro_stop_streaming(struct vb2_queue *q)
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(q);

	if (hantro_vq_is_coded(q)) {
		hantro_postproc_free(ctx);
		if (ctx->codec_ops && ctx->codec_ops->exit)
			ctx->codec_ops->exit(ctx);
	}

	/*
	 * The mem2mem framework calls v4l2_m2m_cancel_job before
	 * .stop_streaming, so there isn't any job running and
	 * it is safe to return all the buffers.
	 */
	if (V4L2_TYPE_IS_OUTPUT(q->type))
		hantro_return_bufs(q, v4l2_m2m_src_buf_remove);
	else
		hantro_return_bufs(q, v4l2_m2m_dst_buf_remove);

	v4l2_m2m_update_stop_streaming_state(ctx->fh.m2m_ctx, q);

	if (V4L2_TYPE_IS_OUTPUT(q->type) &&
	    v4l2_m2m_has_stopped(ctx->fh.m2m_ctx))
		v4l2_event_queue_fh(&ctx->fh, &hantro_eos_event);
}

static void hantro_buf_request_complete(struct vb2_buffer *vb)
{
	struct hantro_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

	v4l2_ctrl_request_complete(vb->req_obj.req, &ctx->ctrl_handler);
}

static int hantro_buf_out_validate(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);

	vbuf->field = V4L2_FIELD_NONE;
	return 0;
}

const struct vb2_ops hantro_queue_ops = {
	.queue_setup = hantro_queue_setup,
	.buf_prepare = hantro_buf_prepare,
	.buf_queue = hantro_buf_queue,
	.buf_out_validate = hantro_buf_out_validate,
	.buf_request_complete = hantro_buf_request_complete,
	.start_streaming = hantro_start_streaming,
	.stop_streaming = hantro_stop_streaming,
	.wait_prepare = vb2_ops_wait_prepare,
	.wait_finish = vb2_ops_wait_finish,
};