// SPDX-License-Identifier: GPL-2.0
/*
 * Hantro VPU codec driver
 *
 * Copyright (C) 2018 Collabora, Ltd.
 * Copyright (C) 2018 Rockchip Electronics Co., Ltd.
 * Alpha Lin <Alpha.Lin@rock-chips.com>
 * Jeffy Chen <jeffy.chen@rock-chips.com>
 *
 * Copyright 2018 Google LLC.
 * Tomasz Figa <tfiga@chromium.org>
 *
 * Based on s5p-mfc driver by Samsung Electronics Co., Ltd.
 * Copyright (C) 2010-2011 Samsung Electronics Co., Ltd.
 */

#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/pm_runtime.h>
#include <linux/videodev2.h>
#include <linux/workqueue.h>
#include <media/v4l2-ctrls.h>
#include <media/v4l2-event.h>
#include <media/v4l2-mem2mem.h>

#include "hantro.h"
#include "hantro_hw.h"
#include "hantro_v4l2.h"

#define HANTRO_DEFAULT_BIT_DEPTH 8

static int hantro_set_fmt_out(struct hantro_ctx *ctx,
                              struct v4l2_pix_format_mplane *pix_mp,
                              bool need_postproc);
static int hantro_set_fmt_cap(struct hantro_ctx *ctx,
                              struct v4l2_pix_format_mplane *pix_mp);

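/*
 * Return the array of native (non-post-processed) formats supported by this
 * context: encoder formats for an encoder instance, decoder formats
 * otherwise. When the caller explicitly requires the post-processor
 * (need_postproc), no native format qualifies and an empty list is returned.
 */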
static const struct hantro_fmt *
hantro_get_formats(const struct hantro_ctx *ctx, unsigned int *num_fmts, bool need_postproc)
{
        const struct hantro_fmt *formats;

        if (need_postproc) {
                *num_fmts = 0;
                return NULL;
        }

        if (ctx->is_encoder) {
                formats = ctx->dev->variant->enc_fmts;
                *num_fmts = ctx->dev->variant->num_enc_fmts;
        } else {
                formats = ctx->dev->variant->dec_fmts;
                *num_fmts = ctx->dev->variant->num_dec_fmts;
        }

        return formats;
}

static const struct hantro_fmt *
hantro_get_postproc_formats(const struct hantro_ctx *ctx,
                            unsigned int *num_fmts)
{
        struct hantro_dev *vpu = ctx->dev;

        if (ctx->is_encoder || !vpu->variant->postproc_fmts) {
                *num_fmts = 0;
                return NULL;
        }

        *num_fmts = ctx->dev->variant->num_postproc_fmts;
        return ctx->dev->variant->postproc_fmts;
}

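/*
 * Map a raw fourcc to its bit depth: the 10-bit formats handled by the
 * driver (P010, P010 4x4 tiled, NV15 4x4 tiled) report 10, everything
 * else is treated as 8-bit.
 */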
int hantro_get_format_depth(u32 fourcc)
{
        switch (fourcc) {
        case V4L2_PIX_FMT_P010:
        case V4L2_PIX_FMT_P010_4L4:
        case V4L2_PIX_FMT_NV15_4L4:
                return 10;
        default:
                return 8;
        }
}

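/*
 * Check whether a raw format can be used for a stream of the given bit
 * depth. Formats that neither match the depth nor come from the
 * post-processor are always acceptable; post-processed formats may only
 * downconvert (e.g. output 8-bit frames from a 10-bit stream), while
 * depth-matching formats must match exactly.
 */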
static bool
hantro_check_depth_match(const struct hantro_fmt *fmt, int bit_depth)
{
        int fmt_depth;

        if (!fmt->match_depth && !fmt->postprocessed)
                return true;

        /* 0 means default depth, which is 8 */
        if (!bit_depth)
                bit_depth = HANTRO_DEFAULT_BIT_DEPTH;

        fmt_depth = hantro_get_format_depth(fmt->fourcc);

        /*
         * Allow only downconversion for postproc formats for now.
         * It may be possible to relax that on some HW.
         */
        if (!fmt->match_depth)
                return fmt_depth <= bit_depth;

        return fmt_depth == bit_depth;
}

static const struct hantro_fmt *
hantro_find_format(const struct hantro_ctx *ctx, u32 fourcc)
{
        const struct hantro_fmt *formats;
        unsigned int i, num_fmts;

        formats = hantro_get_formats(ctx, &num_fmts, HANTRO_AUTO_POSTPROC);
        for (i = 0; i < num_fmts; i++)
                if (formats[i].fourcc == fourcc)
                        return &formats[i];

        formats = hantro_get_postproc_formats(ctx, &num_fmts);
        for (i = 0; i < num_fmts; i++)
                if (formats[i].fourcc == fourcc)
                        return &formats[i];
        return NULL;
}

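/*
 * Pick the first format that matches the requested side (bitstream or raw)
 * and bit depth, searching native formats first and post-processed formats
 * as a fallback. Used to initialize the queue formats and to recover when
 * userspace asks for an unsupported pixel format.
 */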
const struct hantro_fmt *
hantro_get_default_fmt(const struct hantro_ctx *ctx, bool bitstream,
                       int bit_depth, bool need_postproc)
{
        const struct hantro_fmt *formats;
        unsigned int i, num_fmts;

        formats = hantro_get_formats(ctx, &num_fmts, need_postproc);
        for (i = 0; i < num_fmts; i++) {
                if (bitstream == (formats[i].codec_mode !=
                                  HANTRO_MODE_NONE) &&
                    hantro_check_depth_match(&formats[i], bit_depth))
                        return &formats[i];
        }

        formats = hantro_get_postproc_formats(ctx, &num_fmts);
        for (i = 0; i < num_fmts; i++) {
                if (bitstream == (formats[i].codec_mode !=
                                  HANTRO_MODE_NONE) &&
                    hantro_check_depth_match(&formats[i], bit_depth))
                        return &formats[i];
        }

        return NULL;
}

static int vidioc_querycap(struct file *file, void *priv,
                           struct v4l2_capability *cap)
{
        struct hantro_dev *vpu = video_drvdata(file);
        struct video_device *vdev = video_devdata(file);

        strscpy(cap->driver, vpu->dev->driver->name, sizeof(cap->driver));
        strscpy(cap->card, vdev->name, sizeof(cap->card));
        return 0;
}

static int vidioc_enum_framesizes(struct file *file, void *priv,
                                  struct v4l2_frmsizeenum *fsize)
{
        struct hantro_ctx *ctx = fh_to_ctx(priv);
        const struct hantro_fmt *fmt;

        fmt = hantro_find_format(ctx, fsize->pixel_format);
        if (!fmt) {
                vpu_debug(0, "unsupported bitstream format (%08x)\n",
                          fsize->pixel_format);
                return -EINVAL;
        }

        /* For non-coded formats check if postprocessing scaling is possible */
        if (fmt->codec_mode == HANTRO_MODE_NONE) {
                if (hantro_needs_postproc(ctx, fmt))
                        return hanto_postproc_enum_framesizes(ctx, fsize);
                else
                        return -ENOTTY;
        } else if (fsize->index != 0) {
                vpu_debug(0, "invalid frame size index (expected 0, got %d)\n",
                          fsize->index);
                return -EINVAL;
        }

        fsize->type = V4L2_FRMSIZE_TYPE_STEPWISE;
        fsize->stepwise = fmt->frmsize;

        return 0;
}

static int vidioc_enum_fmt(struct file *file, void *priv,
                           struct v4l2_fmtdesc *f, bool capture)
{
        struct hantro_ctx *ctx = fh_to_ctx(priv);
        const struct hantro_fmt *fmt, *formats;
        unsigned int num_fmts, i, j = 0;
        bool skip_mode_none, enum_all_formats;
        u32 index = f->index & ~V4L2_FMTDESC_FLAG_ENUM_ALL;

        /*
         * If the V4L2_FMTDESC_FLAG_ENUM_ALL flag is set, we want to enumerate
         * all pixel formats supported by the hardware.
         */
        enum_all_formats = !!(f->index & V4L2_FMTDESC_FLAG_ENUM_ALL);
        f->index = index;

        /*
         * When dealing with an encoder:
         *  - on the capture side we want to filter out all MODE_NONE formats.
         *  - on the output side we want to filter out all formats that are
         *    not MODE_NONE.
         * When dealing with a decoder:
         *  - on the capture side we want to filter out all formats that are
         *    not MODE_NONE.
         *  - on the output side we want to filter out all MODE_NONE formats.
         */
        skip_mode_none = capture == ctx->is_encoder;

        formats = hantro_get_formats(ctx, &num_fmts, HANTRO_AUTO_POSTPROC);
        for (i = 0; i < num_fmts; i++) {
                bool mode_none = formats[i].codec_mode == HANTRO_MODE_NONE;

                fmt = &formats[i];

                if (skip_mode_none == mode_none)
                        continue;
                if (!hantro_check_depth_match(fmt, ctx->bit_depth) && !enum_all_formats)
                        continue;
                if (j == index) {
                        f->pixelformat = fmt->fourcc;
                        return 0;
                }
                ++j;
        }

        /*
         * Enumerate post-processed formats. As per the specification,
         * we enumerate these formats after the natively decoded formats
         * as a hint to applications about the preferred format.
         */
        if (!capture)
                return -EINVAL;
        formats = hantro_get_postproc_formats(ctx, &num_fmts);
        for (i = 0; i < num_fmts; i++) {
                fmt = &formats[i];

                if (!hantro_check_depth_match(fmt, ctx->bit_depth) && !enum_all_formats)
                        continue;
                if (j == index) {
                        f->pixelformat = fmt->fourcc;
                        return 0;
                }
                ++j;
        }

        return -EINVAL;
}

static int vidioc_enum_fmt_vid_cap(struct file *file, void *priv,
                                   struct v4l2_fmtdesc *f)
{
        return vidioc_enum_fmt(file, priv, f, true);
}

static int vidioc_enum_fmt_vid_out(struct file *file, void *priv,
                                   struct v4l2_fmtdesc *f)
{
        return vidioc_enum_fmt(file, priv, f, false);
}

static int vidioc_g_fmt_out_mplane(struct file *file, void *priv,
                                   struct v4l2_format *f)
{
        struct v4l2_pix_format_mplane *pix_mp = &f->fmt.pix_mp;
        struct hantro_ctx *ctx = fh_to_ctx(priv);

        vpu_debug(4, "f->type = %d\n", f->type);

        *pix_mp = ctx->src_fmt;

        return 0;
}

static int vidioc_g_fmt_cap_mplane(struct file *file, void *priv,
                                   struct v4l2_format *f)
{
        struct v4l2_pix_format_mplane *pix_mp = &f->fmt.pix_mp;
        struct hantro_ctx *ctx = fh_to_ctx(priv);

        vpu_debug(4, "f->type = %d\n", f->type);

        *pix_mp = ctx->dst_fmt;

        return 0;
}

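/*
 * Adjust the format requested by userspace to something the hardware can
 * actually produce or consume: fall back to the default format when the
 * fourcc is unknown, clamp width/height to the codec frame-size
 * constraints, fill the plane layout for raw formats and add room for the
 * auxiliary motion-vector buffers that H.264, VP9, HEVC and AV1 decoding
 * require, and pick a sane default sizeimage for coded formats when
 * userspace passes zero.
 */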
static int hantro_try_fmt(const struct hantro_ctx *ctx,
                          struct v4l2_pix_format_mplane *pix_mp,
                          enum v4l2_buf_type type)
{
        const struct hantro_fmt *fmt;
        const struct hantro_fmt *vpu_fmt;
        bool capture = V4L2_TYPE_IS_CAPTURE(type);
        bool coded;

        coded = capture == ctx->is_encoder;

        vpu_debug(4, "trying format %p4cc\n", &pix_mp->pixelformat);

        fmt = hantro_find_format(ctx, pix_mp->pixelformat);
        if (!fmt) {
                fmt = hantro_get_default_fmt(ctx, coded, HANTRO_DEFAULT_BIT_DEPTH, HANTRO_AUTO_POSTPROC);
                pix_mp->pixelformat = fmt->fourcc;
        }

        if (coded) {
                pix_mp->num_planes = 1;
                vpu_fmt = fmt;
        } else if (ctx->is_encoder) {
                vpu_fmt = hantro_find_format(ctx, ctx->dst_fmt.pixelformat);
        } else {
                /*
                 * Width/height on the CAPTURE end of a decoder are ignored and
                 * replaced by the OUTPUT ones.
                 */
                pix_mp->width = ctx->src_fmt.width;
                pix_mp->height = ctx->src_fmt.height;
                vpu_fmt = fmt;
        }

        pix_mp->field = V4L2_FIELD_NONE;

        v4l2_apply_frmsize_constraints(&pix_mp->width, &pix_mp->height,
                                       &vpu_fmt->frmsize);

        if (!coded) {
                /* Fill remaining fields */
                v4l2_fill_pixfmt_mp(pix_mp, fmt->fourcc, pix_mp->width,
                                    pix_mp->height);
                if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_H264_SLICE &&
                    !hantro_needs_postproc(ctx, fmt))
                        pix_mp->plane_fmt[0].sizeimage +=
                                hantro_h264_mv_size(pix_mp->width,
                                                    pix_mp->height);
                else if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_VP9_FRAME &&
                         !hantro_needs_postproc(ctx, fmt))
                        pix_mp->plane_fmt[0].sizeimage +=
                                hantro_vp9_mv_size(pix_mp->width,
                                                   pix_mp->height);
                else if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_HEVC_SLICE &&
                         !hantro_needs_postproc(ctx, fmt))
                        pix_mp->plane_fmt[0].sizeimage +=
                                hantro_hevc_mv_size(pix_mp->width,
                                                    pix_mp->height);
                else if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_AV1_FRAME &&
                         !hantro_needs_postproc(ctx, fmt))
                        pix_mp->plane_fmt[0].sizeimage +=
                                hantro_av1_mv_size(pix_mp->width,
                                                   pix_mp->height);
        } else if (!pix_mp->plane_fmt[0].sizeimage) {
                /*
                 * For coded formats the application can specify
                 * sizeimage. If the application passes a zero sizeimage,
                 * let's default to the maximum frame size.
                 */
                pix_mp->plane_fmt[0].sizeimage = fmt->header_size +
                        pix_mp->width * pix_mp->height * fmt->max_depth;
        }

        return 0;
}

static int vidioc_try_fmt_cap_mplane(struct file *file, void *priv,
                                     struct v4l2_format *f)
{
        return hantro_try_fmt(fh_to_ctx(priv), &f->fmt.pix_mp, f->type);
}

static int vidioc_try_fmt_out_mplane(struct file *file, void *priv,
                                     struct v4l2_format *f)
{
        return hantro_try_fmt(fh_to_ctx(priv), &f->fmt.pix_mp, f->type);
}

static void
hantro_reset_fmt(struct v4l2_pix_format_mplane *fmt,
                 const struct hantro_fmt *vpu_fmt)
{
        memset(fmt, 0, sizeof(*fmt));

        fmt->pixelformat = vpu_fmt->fourcc;
        fmt->field = V4L2_FIELD_NONE;
        fmt->colorspace = V4L2_COLORSPACE_JPEG;
        fmt->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
        fmt->quantization = V4L2_QUANTIZATION_DEFAULT;
        fmt->xfer_func = V4L2_XFER_FUNC_DEFAULT;
}

static void
hantro_reset_encoded_fmt(struct hantro_ctx *ctx)
{
        const struct hantro_fmt *vpu_fmt;
        struct v4l2_pix_format_mplane fmt;

        vpu_fmt = hantro_get_default_fmt(ctx, true, HANTRO_DEFAULT_BIT_DEPTH, HANTRO_AUTO_POSTPROC);
        if (!vpu_fmt)
                return;

        hantro_reset_fmt(&fmt, vpu_fmt);
        fmt.width = vpu_fmt->frmsize.min_width;
        fmt.height = vpu_fmt->frmsize.min_height;
        if (ctx->is_encoder)
                hantro_set_fmt_cap(ctx, &fmt);
        else
                hantro_set_fmt_out(ctx, &fmt, HANTRO_AUTO_POSTPROC);
}

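/*
 * Reset the raw (uncompressed) side of the context to the default format
 * matching the given bit depth and post-processing requirement, keeping
 * the width/height of the currently selected coded format. On success the
 * context remembers the bit depth and post-processing choice.
 */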
int
hantro_reset_raw_fmt(struct hantro_ctx *ctx, int bit_depth, bool need_postproc)
{
        const struct hantro_fmt *raw_vpu_fmt;
        struct v4l2_pix_format_mplane raw_fmt, *encoded_fmt;
        int ret;

        raw_vpu_fmt = hantro_get_default_fmt(ctx, false, bit_depth, need_postproc);
        if (!raw_vpu_fmt)
                return -EINVAL;

        if (ctx->is_encoder) {
                encoded_fmt = &ctx->dst_fmt;
                ctx->vpu_src_fmt = raw_vpu_fmt;
        } else {
                encoded_fmt = &ctx->src_fmt;
        }

        hantro_reset_fmt(&raw_fmt, raw_vpu_fmt);
        raw_fmt.width = encoded_fmt->width;
        raw_fmt.height = encoded_fmt->height;
        if (ctx->is_encoder)
                ret = hantro_set_fmt_out(ctx, &raw_fmt, need_postproc);
        else
                ret = hantro_set_fmt_cap(ctx, &raw_fmt);

        if (!ret) {
                ctx->bit_depth = bit_depth;
                ctx->need_postproc = need_postproc;
        }

        return ret;
}

void hantro_reset_fmts(struct hantro_ctx *ctx)
{
        hantro_reset_encoded_fmt(ctx);
        hantro_reset_raw_fmt(ctx, HANTRO_DEFAULT_BIT_DEPTH, HANTRO_AUTO_POSTPROC);
}

static void
hantro_update_requires_request(struct hantro_ctx *ctx, u32 fourcc)
{
        switch (fourcc) {
        case V4L2_PIX_FMT_JPEG:
                ctx->fh.m2m_ctx->out_q_ctx.q.requires_requests = false;
                break;
        case V4L2_PIX_FMT_MPEG2_SLICE:
        case V4L2_PIX_FMT_VP8_FRAME:
        case V4L2_PIX_FMT_H264_SLICE:
        case V4L2_PIX_FMT_HEVC_SLICE:
        case V4L2_PIX_FMT_VP9_FRAME:
                ctx->fh.m2m_ctx->out_q_ctx.q.requires_requests = true;
                break;
        default:
                break;
        }
}

static void
hantro_update_requires_hold_capture_buf(struct hantro_ctx *ctx, u32 fourcc)
{
        struct vb2_queue *vq;

        vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
                             V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);

        switch (fourcc) {
        case V4L2_PIX_FMT_JPEG:
        case V4L2_PIX_FMT_MPEG2_SLICE:
        case V4L2_PIX_FMT_VP8_FRAME:
        case V4L2_PIX_FMT_HEVC_SLICE:
        case V4L2_PIX_FMT_VP9_FRAME:
                vq->subsystem_flags &= ~(VB2_V4L2_FL_SUPPORTS_M2M_HOLD_CAPTURE_BUF);
                break;
        case V4L2_PIX_FMT_H264_SLICE:
                vq->subsystem_flags |= VB2_V4L2_FL_SUPPORTS_M2M_HOLD_CAPTURE_BUF;
                break;
        default:
                break;
        }
}

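/*
 * Apply a new format on the OUTPUT queue. For decoders this selects the
 * coded format: a resolution change is allowed while streaming as long as
 * the pixel format stays the same, and the raw CAPTURE format is reset to
 * match the new coded format. For encoders the format cannot change while
 * OUTPUT buffers are allocated. Colorimetry is propagated to the CAPTURE
 * side in both cases.
 */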
static int hantro_set_fmt_out(struct hantro_ctx *ctx,
                              struct v4l2_pix_format_mplane *pix_mp,
                              bool need_postproc)
{
        struct vb2_queue *vq;
        int ret;

        vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
                             V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
        ret = hantro_try_fmt(ctx, pix_mp, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
        if (ret)
                return ret;

        if (!ctx->is_encoder) {
                /*
                 * In order to support dynamic resolution change,
                 * the decoder admits a resolution change, as long
                 * as the pixelformat remains the same.
                 */
                if (vb2_is_streaming(vq) && pix_mp->pixelformat != ctx->src_fmt.pixelformat)
                        return -EBUSY;
        } else {
                /*
                 * The encoder doesn't admit a format change if
                 * there are OUTPUT buffers allocated.
                 */
                if (vb2_is_busy(vq))
                        return -EBUSY;
        }

        ctx->vpu_src_fmt = hantro_find_format(ctx, pix_mp->pixelformat);
        ctx->src_fmt = *pix_mp;

        /*
         * The current raw format might have become invalid with the newly
         * selected codec, so reset it to the default just to be safe and
         * keep the internal driver state sane. The user is mandated to set
         * the raw format again after we return, so we don't need
         * anything smarter.
         * Note that hantro_reset_raw_fmt() also propagates size
         * changes to the raw format.
         */
        if (!ctx->is_encoder)
                hantro_reset_raw_fmt(ctx,
                                     hantro_get_format_depth(pix_mp->pixelformat),
                                     need_postproc);

        /* Colorimetry information is always propagated. */
        ctx->dst_fmt.colorspace = pix_mp->colorspace;
        ctx->dst_fmt.ycbcr_enc = pix_mp->ycbcr_enc;
        ctx->dst_fmt.xfer_func = pix_mp->xfer_func;
        ctx->dst_fmt.quantization = pix_mp->quantization;

        hantro_update_requires_request(ctx, pix_mp->pixelformat);
        hantro_update_requires_hold_capture_buf(ctx, pix_mp->pixelformat);

        vpu_debug(0, "OUTPUT codec mode: %d\n", ctx->vpu_src_fmt->codec_mode);
        vpu_debug(0, "fmt - w: %d, h: %d\n",
                  pix_mp->width, pix_mp->height);
        return 0;
}

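/*
 * Apply a new format on the CAPTURE queue. For encoders this selects the
 * coded format and is refused while OUTPUT buffers are allocated unless
 * nothing actually changes, and the raw OUTPUT format is reset to match.
 * For decoders this selects the raw format. Colorimetry is propagated to
 * the OUTPUT side.
 */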
static int hantro_set_fmt_cap(struct hantro_ctx *ctx,
                              struct v4l2_pix_format_mplane *pix_mp)
{
        int ret;

        if (ctx->is_encoder) {
                struct vb2_queue *peer_vq;

                /*
                 * Since a format change on the CAPTURE queue will reset
                 * the OUTPUT queue, we can't allow doing so
                 * when the OUTPUT queue has buffers allocated.
                 */
                peer_vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
                                          V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
                if (vb2_is_busy(peer_vq) &&
                    (pix_mp->pixelformat != ctx->dst_fmt.pixelformat ||
                     pix_mp->height != ctx->dst_fmt.height ||
                     pix_mp->width != ctx->dst_fmt.width))
                        return -EBUSY;
        }

        ret = hantro_try_fmt(ctx, pix_mp, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
        if (ret)
                return ret;

        ctx->vpu_dst_fmt = hantro_find_format(ctx, pix_mp->pixelformat);
        ctx->dst_fmt = *pix_mp;

        /*
         * The current raw format might have become invalid with the newly
         * selected codec, so reset it to the default just to be safe and
         * keep the internal driver state sane. The user is mandated to set
         * the raw format again after we return, so we don't need
         * anything smarter.
         * Note that hantro_reset_raw_fmt() also propagates size
         * changes to the raw format.
         */
        if (ctx->is_encoder)
                hantro_reset_raw_fmt(ctx, HANTRO_DEFAULT_BIT_DEPTH, HANTRO_AUTO_POSTPROC);

        /* Colorimetry information is always propagated. */
        ctx->src_fmt.colorspace = pix_mp->colorspace;
        ctx->src_fmt.ycbcr_enc = pix_mp->ycbcr_enc;
        ctx->src_fmt.xfer_func = pix_mp->xfer_func;
        ctx->src_fmt.quantization = pix_mp->quantization;

        vpu_debug(0, "CAPTURE codec mode: %d\n", ctx->vpu_dst_fmt->codec_mode);
        vpu_debug(0, "fmt - w: %d, h: %d\n",
                  pix_mp->width, pix_mp->height);

        hantro_update_requires_request(ctx, pix_mp->pixelformat);

        return 0;
}

static int
vidioc_s_fmt_out_mplane(struct file *file, void *priv, struct v4l2_format *f)
{
        return hantro_set_fmt_out(fh_to_ctx(priv), &f->fmt.pix_mp, HANTRO_AUTO_POSTPROC);
}

static int
vidioc_s_fmt_cap_mplane(struct file *file, void *priv, struct v4l2_format *f)
{
        return hantro_set_fmt_cap(fh_to_ctx(priv), &f->fmt.pix_mp);
}

static int vidioc_g_selection(struct file *file, void *priv,
                              struct v4l2_selection *sel)
{
        struct hantro_ctx *ctx = fh_to_ctx(priv);

        /* Crop only supported on source. */
        if (!ctx->is_encoder ||
            sel->type != V4L2_BUF_TYPE_VIDEO_OUTPUT)
                return -EINVAL;

        switch (sel->target) {
        case V4L2_SEL_TGT_CROP_DEFAULT:
        case V4L2_SEL_TGT_CROP_BOUNDS:
                sel->r.top = 0;
                sel->r.left = 0;
                sel->r.width = ctx->src_fmt.width;
                sel->r.height = ctx->src_fmt.height;
                break;
        case V4L2_SEL_TGT_CROP:
                sel->r.top = 0;
                sel->r.left = 0;
                sel->r.width = ctx->dst_fmt.width;
                sel->r.height = ctx->dst_fmt.height;
                break;
        default:
                return -EINVAL;
        }

        return 0;
}

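/*
 * Set the encoder crop rectangle on the OUTPUT queue. Offsets are not
 * supported and only the right-most and bottom-most macroblocks may be
 * cropped, so an invalid rectangle falls back to the full frame; the
 * accepted width is rounded up to a multiple of 4 pixels. The resulting
 * size becomes the coded (CAPTURE) resolution.
 */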
static int vidioc_s_selection(struct file *file, void *priv,
                              struct v4l2_selection *sel)
{
        struct hantro_ctx *ctx = fh_to_ctx(priv);
        struct v4l2_rect *rect = &sel->r;
        struct vb2_queue *vq;

        /* Crop only supported on source. */
        if (!ctx->is_encoder ||
            sel->type != V4L2_BUF_TYPE_VIDEO_OUTPUT)
                return -EINVAL;

        /* Change not allowed if the queue is streaming. */
        vq = v4l2_m2m_get_src_vq(ctx->fh.m2m_ctx);
        if (vb2_is_streaming(vq))
                return -EBUSY;

        if (sel->target != V4L2_SEL_TGT_CROP)
                return -EINVAL;

        /*
         * We do not support offsets, and we can crop only inside
         * right-most or bottom-most macroblocks.
         */
        if (rect->left != 0 || rect->top != 0 ||
            round_up(rect->width, MB_DIM) != ctx->src_fmt.width ||
            round_up(rect->height, MB_DIM) != ctx->src_fmt.height) {
                /* Default to full frame for incorrect settings. */
                rect->left = 0;
                rect->top = 0;
                rect->width = ctx->src_fmt.width;
                rect->height = ctx->src_fmt.height;
        } else {
                /* We support widths aligned to 4 pixels and arbitrary heights. */
                rect->width = round_up(rect->width, 4);
        }

        ctx->dst_fmt.width = rect->width;
        ctx->dst_fmt.height = rect->height;

        return 0;
}

static const struct v4l2_event hantro_eos_event = {
        .type = V4L2_EVENT_EOS
};

static int vidioc_encoder_cmd(struct file *file, void *priv,
                              struct v4l2_encoder_cmd *ec)
{
        struct hantro_ctx *ctx = fh_to_ctx(priv);
        int ret;

        ret = v4l2_m2m_ioctl_try_encoder_cmd(file, priv, ec);
        if (ret < 0)
                return ret;

        if (!vb2_is_streaming(v4l2_m2m_get_src_vq(ctx->fh.m2m_ctx)) ||
            !vb2_is_streaming(v4l2_m2m_get_dst_vq(ctx->fh.m2m_ctx)))
                return 0;

        ret = v4l2_m2m_ioctl_encoder_cmd(file, priv, ec);
        if (ret < 0)
                return ret;

        if (ec->cmd == V4L2_ENC_CMD_STOP &&
            v4l2_m2m_has_stopped(ctx->fh.m2m_ctx))
                v4l2_event_queue_fh(&ctx->fh, &hantro_eos_event);

        if (ec->cmd == V4L2_ENC_CMD_START)
                vb2_clear_last_buffer_dequeued(&ctx->fh.m2m_ctx->cap_q_ctx.q);

        return 0;
}

const struct v4l2_ioctl_ops hantro_ioctl_ops = {
        .vidioc_querycap = vidioc_querycap,
        .vidioc_enum_framesizes = vidioc_enum_framesizes,

        .vidioc_try_fmt_vid_cap_mplane = vidioc_try_fmt_cap_mplane,
        .vidioc_try_fmt_vid_out_mplane = vidioc_try_fmt_out_mplane,
        .vidioc_s_fmt_vid_out_mplane = vidioc_s_fmt_out_mplane,
        .vidioc_s_fmt_vid_cap_mplane = vidioc_s_fmt_cap_mplane,
        .vidioc_g_fmt_vid_out_mplane = vidioc_g_fmt_out_mplane,
        .vidioc_g_fmt_vid_cap_mplane = vidioc_g_fmt_cap_mplane,
        .vidioc_enum_fmt_vid_out = vidioc_enum_fmt_vid_out,
        .vidioc_enum_fmt_vid_cap = vidioc_enum_fmt_vid_cap,

        .vidioc_reqbufs = v4l2_m2m_ioctl_reqbufs,
        .vidioc_querybuf = v4l2_m2m_ioctl_querybuf,
        .vidioc_qbuf = v4l2_m2m_ioctl_qbuf,
        .vidioc_dqbuf = v4l2_m2m_ioctl_dqbuf,
        .vidioc_prepare_buf = v4l2_m2m_ioctl_prepare_buf,
        .vidioc_create_bufs = v4l2_m2m_ioctl_create_bufs,
        .vidioc_remove_bufs = v4l2_m2m_ioctl_remove_bufs,
        .vidioc_expbuf = v4l2_m2m_ioctl_expbuf,

        .vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
        .vidioc_unsubscribe_event = v4l2_event_unsubscribe,

        .vidioc_streamon = v4l2_m2m_ioctl_streamon,
        .vidioc_streamoff = v4l2_m2m_ioctl_streamoff,

        .vidioc_g_selection = vidioc_g_selection,
        .vidioc_s_selection = vidioc_s_selection,

        .vidioc_decoder_cmd = v4l2_m2m_ioctl_stateless_decoder_cmd,
        .vidioc_try_decoder_cmd = v4l2_m2m_ioctl_stateless_try_decoder_cmd,

        .vidioc_try_encoder_cmd = v4l2_m2m_ioctl_try_encoder_cmd,
        .vidioc_encoder_cmd = vidioc_encoder_cmd,
};

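/*
 * vb2 queue_setup: report the number of planes and the minimum size of
 * each plane for the queue's currently selected format, or validate the
 * values passed in by VIDIOC_CREATE_BUFS.
 */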
static int
hantro_queue_setup(struct vb2_queue *vq, unsigned int *num_buffers,
                   unsigned int *num_planes, unsigned int sizes[],
                   struct device *alloc_devs[])
{
        struct hantro_ctx *ctx = vb2_get_drv_priv(vq);
        struct v4l2_pix_format_mplane *pixfmt;
        int i;

        switch (vq->type) {
        case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
                pixfmt = &ctx->dst_fmt;
                break;
        case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
                pixfmt = &ctx->src_fmt;
                break;
        default:
                vpu_err("invalid queue type: %d\n", vq->type);
                return -EINVAL;
        }

        if (*num_planes) {
                if (*num_planes != pixfmt->num_planes)
                        return -EINVAL;
                for (i = 0; i < pixfmt->num_planes; ++i)
                        if (sizes[i] < pixfmt->plane_fmt[i].sizeimage)
                                return -EINVAL;
                return 0;
        }

        *num_planes = pixfmt->num_planes;
        for (i = 0; i < pixfmt->num_planes; ++i)
                sizes[i] = pixfmt->plane_fmt[i].sizeimage;
        return 0;
}

static int
hantro_buf_plane_check(struct vb2_buffer *vb,
                       struct v4l2_pix_format_mplane *pixfmt)
{
        unsigned int sz;
        int i;

        for (i = 0; i < pixfmt->num_planes; ++i) {
                sz = pixfmt->plane_fmt[i].sizeimage;
                vpu_debug(4, "plane %d size: %ld, sizeimage: %u\n",
                          i, vb2_plane_size(vb, i), sz);
                if (vb2_plane_size(vb, i) < sz) {
                        vpu_err("plane %d is too small for output\n", i);
                        return -EINVAL;
                }
        }
        return 0;
}

static int hantro_buf_prepare(struct vb2_buffer *vb)
{
        struct vb2_queue *vq = vb->vb2_queue;
        struct hantro_ctx *ctx = vb2_get_drv_priv(vq);
        struct v4l2_pix_format_mplane *pix_fmt;
        int ret;

        if (V4L2_TYPE_IS_OUTPUT(vq->type))
                pix_fmt = &ctx->src_fmt;
        else
                pix_fmt = &ctx->dst_fmt;
        ret = hantro_buf_plane_check(vb, pix_fmt);
        if (ret)
                return ret;
        /*
         * The buffer's bytesused must be written by the driver for CAPTURE
         * buffers. (For OUTPUT buffers, if userspace passes a zero bytesused,
         * v4l2-core sets it to the buffer length.)
         */
        if (V4L2_TYPE_IS_CAPTURE(vq->type)) {
                if (ctx->is_encoder)
                        vb2_set_plane_payload(vb, 0, 0);
                else
                        vb2_set_plane_payload(vb, 0, pix_fmt->plane_fmt[0].sizeimage);
        }

        return 0;
}

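/*
 * vb2 buf_queue: when a CAPTURE buffer is queued after the drain sequence
 * has marked the next destination buffer as last, complete it immediately
 * with an empty payload and raise the EOS event instead of handing it to
 * the m2m job queue.
 */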
static void hantro_buf_queue(struct vb2_buffer *vb)
{
        struct hantro_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
        struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);

        if (V4L2_TYPE_IS_CAPTURE(vb->vb2_queue->type) &&
            vb2_is_streaming(vb->vb2_queue) &&
            v4l2_m2m_dst_buf_is_last(ctx->fh.m2m_ctx)) {
                unsigned int i;

                for (i = 0; i < vb->num_planes; i++)
                        vb2_set_plane_payload(vb, i, 0);

                vbuf->field = V4L2_FIELD_NONE;
                vbuf->sequence = ctx->sequence_cap++;

                v4l2_m2m_last_buffer_done(ctx->fh.m2m_ctx, vbuf);
                v4l2_event_queue_fh(&ctx->fh, &hantro_eos_event);
                return;
        }

        v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
}

static bool hantro_vq_is_coded(struct vb2_queue *q)
{
        struct hantro_ctx *ctx = vb2_get_drv_priv(q);

        return ctx->is_encoder != V4L2_TYPE_IS_OUTPUT(q->type);
}

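/*
 * vb2 start_streaming: reset the sequence counter for the queue and, when
 * the coded-side queue starts, bind the codec ops for the selected codec
 * mode, run its init hook and set up the post-processor if the raw format
 * requires it.
 */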
static int hantro_start_streaming(struct vb2_queue *q, unsigned int count)
{
        struct hantro_ctx *ctx = vb2_get_drv_priv(q);
        int ret = 0;

        v4l2_m2m_update_start_streaming_state(ctx->fh.m2m_ctx, q);

        if (V4L2_TYPE_IS_OUTPUT(q->type))
                ctx->sequence_out = 0;
        else
                ctx->sequence_cap = 0;

        if (hantro_vq_is_coded(q)) {
                enum hantro_codec_mode codec_mode;

                if (V4L2_TYPE_IS_OUTPUT(q->type))
                        codec_mode = ctx->vpu_src_fmt->codec_mode;
                else
                        codec_mode = ctx->vpu_dst_fmt->codec_mode;

                vpu_debug(4, "Codec mode = %d\n", codec_mode);
                ctx->codec_ops = &ctx->dev->variant->codec_ops[codec_mode];
                if (ctx->codec_ops->init) {
                        ret = ctx->codec_ops->init(ctx);
                        if (ret)
                                return ret;
                }

                if (hantro_needs_postproc(ctx, ctx->vpu_dst_fmt)) {
                        ret = hantro_postproc_init(ctx);
                        if (ret)
                                goto err_codec_exit;
                }
        }
        return ret;

err_codec_exit:
        if (ctx->codec_ops->exit)
                ctx->codec_ops->exit(ctx);
        return ret;
}

static void
hantro_return_bufs(struct vb2_queue *q,
                   struct vb2_v4l2_buffer *(*buf_remove)(struct v4l2_m2m_ctx *))
{
        struct hantro_ctx *ctx = vb2_get_drv_priv(q);

        for (;;) {
                struct vb2_v4l2_buffer *vbuf;

                vbuf = buf_remove(ctx->fh.m2m_ctx);
                if (!vbuf)
                        break;
                v4l2_ctrl_request_complete(vbuf->vb2_buf.req_obj.req,
                                           &ctx->ctrl_handler);
                v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
        }
}

static void hantro_stop_streaming(struct vb2_queue *q)
{
        struct hantro_ctx *ctx = vb2_get_drv_priv(q);

        if (hantro_vq_is_coded(q)) {
                hantro_postproc_free(ctx);
                if (ctx->codec_ops && ctx->codec_ops->exit)
                        ctx->codec_ops->exit(ctx);
        }

        /*
         * The mem2mem framework calls v4l2_m2m_cancel_job before
         * .stop_streaming, so there isn't any job running and
         * it is safe to return all the buffers.
         */
        if (V4L2_TYPE_IS_OUTPUT(q->type))
                hantro_return_bufs(q, v4l2_m2m_src_buf_remove);
        else
                hantro_return_bufs(q, v4l2_m2m_dst_buf_remove);

        v4l2_m2m_update_stop_streaming_state(ctx->fh.m2m_ctx, q);

        if (V4L2_TYPE_IS_OUTPUT(q->type) &&
            v4l2_m2m_has_stopped(ctx->fh.m2m_ctx))
                v4l2_event_queue_fh(&ctx->fh, &hantro_eos_event);
}

static void hantro_buf_request_complete(struct vb2_buffer *vb)
{
        struct hantro_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

        v4l2_ctrl_request_complete(vb->req_obj.req, &ctx->ctrl_handler);
}

static int hantro_buf_out_validate(struct vb2_buffer *vb)
{
        struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);

        vbuf->field = V4L2_FIELD_NONE;
        return 0;
}

const struct vb2_ops hantro_queue_ops = {
        .queue_setup = hantro_queue_setup,
        .buf_prepare = hantro_buf_prepare,
        .buf_queue = hantro_buf_queue,
        .buf_out_validate = hantro_buf_out_validate,
        .buf_request_complete = hantro_buf_request_complete,
        .start_streaming = hantro_start_streaming,
        .stop_streaming = hantro_stop_streaming,
};