// SPDX-License-Identifier: GPL-2.0
/*
 * Rockchip Video Decoder VP9 backend
 *
 * Copyright (C) 2019 Collabora, Ltd.
 *	Boris Brezillon <boris.brezillon@collabora.com>
 * Copyright (C) 2021 Collabora, Ltd.
 *	Andrzej Pietrasiewicz <andrzej.p@collabora.com>
 *
 * Copyright (C) 2016 Rockchip Electronics Co., Ltd.
 *	Alpha Lin <Alpha.Lin@rock-chips.com>
 */

/*
 * For following the vp9 spec please start reading this driver
 * code from rkvdec_vp9_run() followed by rkvdec_vp9_done().
 */
18
19 #include <linux/kernel.h>
20 #include <linux/vmalloc.h>
21 #include <media/v4l2-mem2mem.h>
22 #include <media/v4l2-vp9.h>
23
24 #include "rkvdec.h"
25 #include "rkvdec-regs.h"
26
27 #define RKVDEC_VP9_PROBE_SIZE 4864
28 #define RKVDEC_VP9_COUNT_SIZE 13232
29 #define RKVDEC_VP9_MAX_SEGMAP_SIZE 73728
30
31 struct rkvdec_vp9_intra_mode_probs {
32 u8 y_mode[105];
33 u8 uv_mode[23];
34 };
35
36 struct rkvdec_vp9_intra_only_frame_probs {
37 u8 coef_intra[4][2][128];
38 struct rkvdec_vp9_intra_mode_probs intra_mode[10];
39 };
40
41 struct rkvdec_vp9_inter_frame_probs {
42 u8 y_mode[4][9];
43 u8 comp_mode[5];
44 u8 comp_ref[5];
45 u8 single_ref[5][2];
46 u8 inter_mode[7][3];
47 u8 interp_filter[4][2];
48 u8 padding0[11];
49 u8 coef[2][4][2][128];
50 u8 uv_mode_0_2[3][9];
51 u8 padding1[5];
52 u8 uv_mode_3_5[3][9];
53 u8 padding2[5];
54 u8 uv_mode_6_8[3][9];
55 u8 padding3[5];
56 u8 uv_mode_9[9];
57 u8 padding4[7];
58 u8 padding5[16];
59 struct {
60 u8 joint[3];
61 u8 sign[2];
62 u8 classes[2][10];
63 u8 class0_bit[2];
64 u8 bits[2][10];
65 u8 class0_fr[2][2][3];
66 u8 fr[2][3];
67 u8 class0_hp[2];
68 u8 hp[2];
69 } mv;
70 };
71
72 struct rkvdec_vp9_probs {
73 u8 partition[16][3];
74 u8 pred[3];
75 u8 tree[7];
76 u8 skip[3];
77 u8 tx32[2][3];
78 u8 tx16[2][2];
79 u8 tx8[2][1];
80 u8 is_inter[4];
81 /* 128 bit alignment */
82 u8 padding0[3];
83 union {
84 struct rkvdec_vp9_inter_frame_probs inter;
85 struct rkvdec_vp9_intra_only_frame_probs intra_only;
86 };
87 /* 128 bit alignment */
88 u8 padding1[11];
89 };
90
91 /* Data structure describing auxiliary buffer format. */
92 struct rkvdec_vp9_priv_tbl {
93 struct rkvdec_vp9_probs probs;
94 u8 segmap[2][RKVDEC_VP9_MAX_SEGMAP_SIZE];
95 };
96
97 struct rkvdec_vp9_refs_counts {
98 u32 eob[2];
99 u32 coeff[3];
100 };
101
102 struct rkvdec_vp9_inter_frame_symbol_counts {
103 u32 partition[16][4];
104 u32 skip[3][2];
105 u32 inter[4][2];
106 u32 tx32p[2][4];
107 u32 tx16p[2][4];
108 u32 tx8p[2][2];
109 u32 y_mode[4][10];
110 u32 uv_mode[10][10];
111 u32 comp[5][2];
112 u32 comp_ref[5][2];
113 u32 single_ref[5][2][2];
114 u32 mv_mode[7][4];
115 u32 filter[4][3];
116 u32 mv_joint[4];
117 u32 sign[2][2];
118 /* add 1 element for align */
119 u32 classes[2][11 + 1];
120 u32 class0[2][2];
121 u32 bits[2][10][2];
122 u32 class0_fp[2][2][4];
123 u32 fp[2][4];
124 u32 class0_hp[2][2];
125 u32 hp[2][2];
126 struct rkvdec_vp9_refs_counts ref_cnt[2][4][2][6][6];
127 };
128
129 struct rkvdec_vp9_intra_frame_symbol_counts {
130 u32 partition[4][4][4];
131 u32 skip[3][2];
132 u32 intra[4][2];
133 u32 tx32p[2][4];
134 u32 tx16p[2][4];
135 u32 tx8p[2][2];
136 struct rkvdec_vp9_refs_counts ref_cnt[2][4][2][6][6];
137 };
138
139 struct rkvdec_vp9_run {
140 struct rkvdec_run base;
141 const struct v4l2_ctrl_vp9_frame *decode_params;
142 };
143
144 struct rkvdec_vp9_frame_info {
145 u32 valid : 1;
146 u32 segmapid : 1;
147 u32 frame_context_idx : 2;
148 u32 reference_mode : 2;
149 u32 tx_mode : 3;
150 u32 interpolation_filter : 3;
151 u32 flags;
152 u64 timestamp;
153 struct v4l2_vp9_segmentation seg;
154 struct v4l2_vp9_loop_filter lf;
155 };
156
157 struct rkvdec_vp9_ctx {
158 struct rkvdec_aux_buf priv_tbl;
159 struct rkvdec_aux_buf count_tbl;
160 struct v4l2_vp9_frame_symbol_counts inter_cnts;
161 struct v4l2_vp9_frame_symbol_counts intra_cnts;
162 struct v4l2_vp9_frame_context probability_tables;
163 struct v4l2_vp9_frame_context frame_context[4];
164 struct rkvdec_vp9_frame_info cur;
165 struct rkvdec_vp9_frame_info last;
166 struct rkvdec_regs regs;
167 };
168
write_coeff_plane(const u8 coef[6][6][3],u8 * coeff_plane)169 static void write_coeff_plane(const u8 coef[6][6][3], u8 *coeff_plane)
170 {
171 unsigned int idx = 0, byte_count = 0;
172 int k, m, n;
173 u8 p;
174
175 for (k = 0; k < 6; k++) {
176 for (m = 0; m < 6; m++) {
177 for (n = 0; n < 3; n++) {
178 p = coef[k][m][n];
179 coeff_plane[idx++] = p;
180 byte_count++;
181 if (byte_count == 27) {
182 idx += 5;
183 byte_count = 0;
184 }
185 }
186 }
187 }
188 }
189
init_intra_only_probs(struct rkvdec_ctx * ctx,const struct rkvdec_vp9_run * run)190 static void init_intra_only_probs(struct rkvdec_ctx *ctx,
191 const struct rkvdec_vp9_run *run)
192 {
193 struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
194 struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
195 struct rkvdec_vp9_intra_only_frame_probs *rkprobs;
196 const struct v4l2_vp9_frame_context *probs;
197 unsigned int i, j, k;
198
199 rkprobs = &tbl->probs.intra_only;
200 probs = &vp9_ctx->probability_tables;
201
202 /*
203 * intra only 149 x 128 bits ,aligned to 152 x 128 bits coeff related
204 * prob 64 x 128 bits
205 */
206 for (i = 0; i < ARRAY_SIZE(probs->coef); i++) {
207 for (j = 0; j < ARRAY_SIZE(probs->coef[0]); j++)
208 write_coeff_plane(probs->coef[i][j][0],
209 rkprobs->coef_intra[i][j]);
210 }
211
212 /* intra mode prob 80 x 128 bits */
213 for (i = 0; i < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob); i++) {
214 unsigned int byte_count = 0;
215 int idx = 0;
216
217 /* vp9_kf_y_mode_prob */
218 for (j = 0; j < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob[0]); j++) {
219 for (k = 0; k < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob[0][0]);
220 k++) {
221 u8 val = v4l2_vp9_kf_y_mode_prob[i][j][k];
222
223 rkprobs->intra_mode[i].y_mode[idx++] = val;
224 byte_count++;
225 if (byte_count == 27) {
226 byte_count = 0;
227 idx += 5;
228 }
229 }
230 }
231 }
232
233 for (i = 0; i < sizeof(v4l2_vp9_kf_uv_mode_prob); ++i) {
234 const u8 *ptr = (const u8 *)v4l2_vp9_kf_uv_mode_prob;
235
236 rkprobs->intra_mode[i / 23].uv_mode[i % 23] = ptr[i];
237 }
238 }
239
init_inter_probs(struct rkvdec_ctx * ctx,const struct rkvdec_vp9_run * run)240 static void init_inter_probs(struct rkvdec_ctx *ctx,
241 const struct rkvdec_vp9_run *run)
242 {
243 struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
244 struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
245 struct rkvdec_vp9_inter_frame_probs *rkprobs;
246 const struct v4l2_vp9_frame_context *probs;
247 unsigned int i, j, k;
248
249 rkprobs = &tbl->probs.inter;
250 probs = &vp9_ctx->probability_tables;
251
252 /*
253 * inter probs
254 * 151 x 128 bits, aligned to 152 x 128 bits
255 * inter only
256 * intra_y_mode & inter_block info 6 x 128 bits
257 */
258
259 memcpy(rkprobs->y_mode, probs->y_mode, sizeof(rkprobs->y_mode));
260 memcpy(rkprobs->comp_mode, probs->comp_mode,
261 sizeof(rkprobs->comp_mode));
262 memcpy(rkprobs->comp_ref, probs->comp_ref,
263 sizeof(rkprobs->comp_ref));
264 memcpy(rkprobs->single_ref, probs->single_ref,
265 sizeof(rkprobs->single_ref));
266 memcpy(rkprobs->inter_mode, probs->inter_mode,
267 sizeof(rkprobs->inter_mode));
268 memcpy(rkprobs->interp_filter, probs->interp_filter,
269 sizeof(rkprobs->interp_filter));
270
271 /* 128 x 128 bits coeff related */
272 for (i = 0; i < ARRAY_SIZE(probs->coef); i++) {
273 for (j = 0; j < ARRAY_SIZE(probs->coef[0]); j++) {
274 for (k = 0; k < ARRAY_SIZE(probs->coef[0][0]); k++)
275 write_coeff_plane(probs->coef[i][j][k],
276 rkprobs->coef[k][i][j]);
277 }
278 }
279
280 /* intra uv mode 6 x 128 */
281 memcpy(rkprobs->uv_mode_0_2, &probs->uv_mode[0],
282 sizeof(rkprobs->uv_mode_0_2));
283 memcpy(rkprobs->uv_mode_3_5, &probs->uv_mode[3],
284 sizeof(rkprobs->uv_mode_3_5));
285 memcpy(rkprobs->uv_mode_6_8, &probs->uv_mode[6],
286 sizeof(rkprobs->uv_mode_6_8));
287 memcpy(rkprobs->uv_mode_9, &probs->uv_mode[9],
288 sizeof(rkprobs->uv_mode_9));
289
290 /* mv related 6 x 128 */
291 memcpy(rkprobs->mv.joint, probs->mv.joint,
292 sizeof(rkprobs->mv.joint));
293 memcpy(rkprobs->mv.sign, probs->mv.sign,
294 sizeof(rkprobs->mv.sign));
295 memcpy(rkprobs->mv.classes, probs->mv.classes,
296 sizeof(rkprobs->mv.classes));
297 memcpy(rkprobs->mv.class0_bit, probs->mv.class0_bit,
298 sizeof(rkprobs->mv.class0_bit));
299 memcpy(rkprobs->mv.bits, probs->mv.bits,
300 sizeof(rkprobs->mv.bits));
301 memcpy(rkprobs->mv.class0_fr, probs->mv.class0_fr,
302 sizeof(rkprobs->mv.class0_fr));
303 memcpy(rkprobs->mv.fr, probs->mv.fr,
304 sizeof(rkprobs->mv.fr));
305 memcpy(rkprobs->mv.class0_hp, probs->mv.class0_hp,
306 sizeof(rkprobs->mv.class0_hp));
307 memcpy(rkprobs->mv.hp, probs->mv.hp,
308 sizeof(rkprobs->mv.hp));
309 }
310
init_probs(struct rkvdec_ctx * ctx,const struct rkvdec_vp9_run * run)311 static void init_probs(struct rkvdec_ctx *ctx,
312 const struct rkvdec_vp9_run *run)
313 {
314 const struct v4l2_ctrl_vp9_frame *dec_params;
315 struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
316 struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
317 struct rkvdec_vp9_probs *rkprobs = &tbl->probs;
318 const struct v4l2_vp9_segmentation *seg;
319 const struct v4l2_vp9_frame_context *probs;
320 bool intra_only;
321
322 dec_params = run->decode_params;
323 probs = &vp9_ctx->probability_tables;
324 seg = &dec_params->seg;
325
326 memset(rkprobs, 0, sizeof(*rkprobs));
327
328 intra_only = !!(dec_params->flags &
329 (V4L2_VP9_FRAME_FLAG_KEY_FRAME |
330 V4L2_VP9_FRAME_FLAG_INTRA_ONLY));
331
332 /* sb info 5 x 128 bit */
333 memcpy(rkprobs->partition,
334 intra_only ? v4l2_vp9_kf_partition_probs : probs->partition,
335 sizeof(rkprobs->partition));
336
337 memcpy(rkprobs->pred, seg->pred_probs, sizeof(rkprobs->pred));
338 memcpy(rkprobs->tree, seg->tree_probs, sizeof(rkprobs->tree));
339 memcpy(rkprobs->skip, probs->skip, sizeof(rkprobs->skip));
340 memcpy(rkprobs->tx32, probs->tx32, sizeof(rkprobs->tx32));
341 memcpy(rkprobs->tx16, probs->tx16, sizeof(rkprobs->tx16));
342 memcpy(rkprobs->tx8, probs->tx8, sizeof(rkprobs->tx8));
343 memcpy(rkprobs->is_inter, probs->is_inter, sizeof(rkprobs->is_inter));
344
345 if (intra_only)
346 init_intra_only_probs(ctx, run);
347 else
348 init_inter_probs(ctx, run);
349 }
350
351 static struct rkvdec_decoded_buffer *
get_ref_buf(struct rkvdec_ctx * ctx,struct vb2_v4l2_buffer * dst,u64 timestamp)352 get_ref_buf(struct rkvdec_ctx *ctx, struct vb2_v4l2_buffer *dst, u64 timestamp)
353 {
354 struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;
355 struct vb2_queue *cap_q = &m2m_ctx->cap_q_ctx.q;
356 struct vb2_buffer *buf;
357
358 /*
359 * If a ref is unused or invalid, address of current destination
360 * buffer is returned.
361 */
362 buf = vb2_find_buffer(cap_q, timestamp);
363 if (!buf)
364 buf = &dst->vb2_buf;
365
366 return vb2_to_rkvdec_decoded_buf(buf);
367 }
368
get_mv_base_addr(struct rkvdec_decoded_buffer * buf)369 static dma_addr_t get_mv_base_addr(struct rkvdec_decoded_buffer *buf)
370 {
371 unsigned int aligned_pitch, aligned_height, yuv_len;
372
373 aligned_height = round_up(buf->vp9.height, 64);
374 aligned_pitch = round_up(buf->vp9.width * buf->vp9.bit_depth, 512) / 8;
375 yuv_len = (aligned_height * aligned_pitch * 3) / 2;
376
377 return vb2_dma_contig_plane_dma_addr(&buf->base.vb.vb2_buf, 0) +
378 yuv_len;
379 }
380
config_ref_registers(struct rkvdec_ctx * ctx,const struct rkvdec_vp9_run * run,struct rkvdec_decoded_buffer * ref_buf,int i)381 static void config_ref_registers(struct rkvdec_ctx *ctx,
382 const struct rkvdec_vp9_run *run,
383 struct rkvdec_decoded_buffer *ref_buf,
384 int i)
385 {
386 unsigned int aligned_pitch, aligned_height, y_len, yuv_len;
387 struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
388 struct rkvdec_regs *regs = &vp9_ctx->regs;
389
390 aligned_height = round_up(ref_buf->vp9.height, 64);
391 regs->vp9.reg17_19[i].frameheight = ref_buf->vp9.height;
392 regs->vp9.reg17_19[i].framewidth = ref_buf->vp9.width;
393
394 regs->vp9.refer_bases[i] = vb2_dma_contig_plane_dma_addr(&ref_buf->base.vb.vb2_buf, 0);
395
396 if (&ref_buf->base.vb == run->base.bufs.dst)
397 return;
398
399 aligned_pitch = round_up(ref_buf->vp9.width * ref_buf->vp9.bit_depth, 512) / 8;
400 y_len = aligned_height * aligned_pitch;
401 yuv_len = (y_len * 3) / 2;
402
403 regs->vp9.reg37_39[i].y_hor_virstride = aligned_pitch / 16;
404 regs->vp9.reg37_39[i].uv_hor_virstride = aligned_pitch / 16;
405 regs->vp9.reg48_50[i].virstride = y_len / 16;
406
407 if (!i)
408 regs->vp9.reg51.lastref_yuv_virstride = yuv_len / 16;
409 }
410
config_seg_registers(struct rkvdec_ctx * ctx,unsigned int segid)411 static void config_seg_registers(struct rkvdec_ctx *ctx, unsigned int segid)
412 {
413 struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
414 struct rkvdec_regs *regs = &vp9_ctx->regs;
415 const struct v4l2_vp9_segmentation *seg;
416 s16 feature_val;
417 int feature_id;
418
419 seg = vp9_ctx->last.valid ? &vp9_ctx->last.seg : &vp9_ctx->cur.seg;
420 feature_id = V4L2_VP9_SEG_LVL_ALT_Q;
421 if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
422 feature_val = seg->feature_data[segid][feature_id];
423 regs->vp9.reg20_27[segid].segid_frame_qp_delta_en = 1;
424 regs->vp9.reg20_27[segid].segid_frame_qp_delta = feature_val;
425 }
426
427 feature_id = V4L2_VP9_SEG_LVL_ALT_L;
428 if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
429 feature_val = seg->feature_data[segid][feature_id];
430 regs->vp9.reg20_27[segid].segid_frame_loopfilter_value_en = 1;
431 regs->vp9.reg20_27[segid].segid_frame_loopfilter_value = feature_val;
432 }
433
434 feature_id = V4L2_VP9_SEG_LVL_REF_FRAME;
435 if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
436 feature_val = seg->feature_data[segid][feature_id];
437 regs->vp9.reg20_27[segid].segid_referinfo_en = 1;
438 regs->vp9.reg20_27[segid].segid_referinfo = feature_val;
439 }
440
441 feature_id = V4L2_VP9_SEG_LVL_SKIP;
442 regs->vp9.reg20_27[segid].segid_frame_skip_en =
443 v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid);
444
445 regs->vp9.reg20_27[segid].segid_abs_delta = !segid &&
446 (seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE);
447 }
448
update_dec_buf_info(struct rkvdec_decoded_buffer * buf,const struct v4l2_ctrl_vp9_frame * dec_params)449 static void update_dec_buf_info(struct rkvdec_decoded_buffer *buf,
450 const struct v4l2_ctrl_vp9_frame *dec_params)
451 {
452 buf->vp9.width = dec_params->frame_width_minus_1 + 1;
453 buf->vp9.height = dec_params->frame_height_minus_1 + 1;
454 buf->vp9.bit_depth = dec_params->bit_depth;
455 }
456
update_ctx_cur_info(struct rkvdec_vp9_ctx * vp9_ctx,struct rkvdec_decoded_buffer * buf,const struct v4l2_ctrl_vp9_frame * dec_params)457 static void update_ctx_cur_info(struct rkvdec_vp9_ctx *vp9_ctx,
458 struct rkvdec_decoded_buffer *buf,
459 const struct v4l2_ctrl_vp9_frame *dec_params)
460 {
461 vp9_ctx->cur.valid = true;
462 vp9_ctx->cur.reference_mode = dec_params->reference_mode;
463 vp9_ctx->cur.interpolation_filter = dec_params->interpolation_filter;
464 vp9_ctx->cur.flags = dec_params->flags;
465 vp9_ctx->cur.timestamp = buf->base.vb.vb2_buf.timestamp;
466 vp9_ctx->cur.seg = dec_params->seg;
467 vp9_ctx->cur.lf = dec_params->lf;
468 }
469
update_ctx_last_info(struct rkvdec_vp9_ctx * vp9_ctx)470 static void update_ctx_last_info(struct rkvdec_vp9_ctx *vp9_ctx)
471 {
472 vp9_ctx->last = vp9_ctx->cur;
473 }
474
config_registers(struct rkvdec_ctx * ctx,const struct rkvdec_vp9_run * run)475 static void config_registers(struct rkvdec_ctx *ctx,
476 const struct rkvdec_vp9_run *run)
477 {
478 unsigned int y_len, uv_len, yuv_len, bit_depth, aligned_height, aligned_pitch, stream_len;
479 const struct v4l2_ctrl_vp9_frame *dec_params;
480 struct rkvdec_decoded_buffer *ref_bufs[3];
481 struct rkvdec_decoded_buffer *dst, *last, *mv_ref;
482 struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
483 struct rkvdec_regs *regs = &vp9_ctx->regs;
484 const struct v4l2_vp9_segmentation *seg;
485 struct rkvdec_dev *rkvdec = ctx->dev;
486 dma_addr_t addr;
487 bool intra_only;
488 unsigned int i;
489
490 dec_params = run->decode_params;
491 dst = vb2_to_rkvdec_decoded_buf(&run->base.bufs.dst->vb2_buf);
492 ref_bufs[0] = get_ref_buf(ctx, &dst->base.vb, dec_params->last_frame_ts);
493 ref_bufs[1] = get_ref_buf(ctx, &dst->base.vb, dec_params->golden_frame_ts);
494 ref_bufs[2] = get_ref_buf(ctx, &dst->base.vb, dec_params->alt_frame_ts);
495
496 if (vp9_ctx->last.valid)
497 last = get_ref_buf(ctx, &dst->base.vb, vp9_ctx->last.timestamp);
498 else
499 last = dst;
500
501 update_dec_buf_info(dst, dec_params);
502 update_ctx_cur_info(vp9_ctx, dst, dec_params);
503 seg = &dec_params->seg;
504
505 intra_only = !!(dec_params->flags &
506 (V4L2_VP9_FRAME_FLAG_KEY_FRAME |
507 V4L2_VP9_FRAME_FLAG_INTRA_ONLY));
508
509 regs->common.reg02.dec_mode = RKVDEC_MODE_VP9;
510
511 bit_depth = dec_params->bit_depth;
512 aligned_height = round_up(ctx->decoded_fmt.fmt.pix_mp.height, 64);
513
514 aligned_pitch = round_up(ctx->decoded_fmt.fmt.pix_mp.width *
515 bit_depth,
516 512) / 8;
517 y_len = aligned_height * aligned_pitch;
518 uv_len = y_len / 2;
519 yuv_len = y_len + uv_len;
520
521 regs->common.reg03.y_hor_virstride = aligned_pitch / 16;
522 regs->common.reg03.uv_hor_virstride = aligned_pitch / 16;
523 regs->common.reg08.y_virstride = y_len / 16;
524 regs->common.reg09.yuv_virstride = yuv_len / 16;
525
526 stream_len = vb2_get_plane_payload(&run->base.bufs.src->vb2_buf, 0);
527
528 regs->common.stream_len = stream_len;
529
530 /*
531 * Reset count buffer, because decoder only output intra related syntax
532 * counts when decoding intra frame, but update entropy need to update
533 * all the probabilities.
534 */
535 if (intra_only)
536 memset(vp9_ctx->count_tbl.cpu, 0, vp9_ctx->count_tbl.size);
537
538 vp9_ctx->cur.segmapid = vp9_ctx->last.segmapid;
539 if (!intra_only &&
540 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
541 (!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED) ||
542 (seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP)))
543 vp9_ctx->cur.segmapid++;
544
545 for (i = 0; i < ARRAY_SIZE(ref_bufs); i++)
546 config_ref_registers(ctx, run, ref_bufs[i], i);
547
548 for (i = 0; i < 8; i++)
549 config_seg_registers(ctx, i);
550
551 regs->vp9.reg28.tx_mode = vp9_ctx->cur.tx_mode;
552 regs->vp9.reg28.frame_reference_mode = dec_params->reference_mode;
553
554 if (!intra_only) {
555 const struct v4l2_vp9_loop_filter *lf;
556 s8 delta;
557
558 if (vp9_ctx->last.valid)
559 lf = &vp9_ctx->last.lf;
560 else
561 lf = &vp9_ctx->cur.lf;
562
563 for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++) {
564 delta = lf->ref_deltas[i];
565 switch (i) {
566 case 0:
567 regs->vp9.reg32.ref_deltas_lastframe0 = delta;
568 break;
569 case 1:
570 regs->vp9.reg32.ref_deltas_lastframe1 = delta;
571 break;
572 case 2:
573 regs->vp9.reg32.ref_deltas_lastframe2 = delta;
574 break;
575 case 3:
576 regs->vp9.reg32.ref_deltas_lastframe3 = delta;
577 break;
578 }
579 }
580
581 for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++) {
582 delta = lf->mode_deltas[i];
583 switch (i) {
584 case 0:
585 regs->vp9.reg33.mode_deltas_lastframe0 = delta;
586 break;
587 case 1:
588 regs->vp9.reg33.mode_deltas_lastframe1 = delta;
589 break;
590 }
591 }
592 }
593
594 regs->vp9.reg33.segmentation_enable_lstframe =
595 vp9_ctx->last.valid && !intra_only &&
596 vp9_ctx->last.seg.flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED;
597
598 regs->vp9.reg33.last_show_frame =
599 vp9_ctx->last.valid &&
600 vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_SHOW_FRAME;
601
602 regs->vp9.reg33.last_intra_only =
603 vp9_ctx->last.valid &&
604 vp9_ctx->last.flags &
605 (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY);
606
607 regs->vp9.reg33.last_widthheight_eqcur =
608 vp9_ctx->last.valid &&
609 last->vp9.width == dst->vp9.width &&
610 last->vp9.height == dst->vp9.height;
611
612 regs->vp9.reg36.lasttile_size =
613 stream_len - dec_params->compressed_header_size -
614 dec_params->uncompressed_header_size;
615
616 for (i = 0; !intra_only && i < ARRAY_SIZE(ref_bufs); i++) {
617 unsigned int refw = ref_bufs[i]->vp9.width;
618 unsigned int refh = ref_bufs[i]->vp9.height;
619 u32 hscale, vscale;
620
621 hscale = (refw << 14) / dst->vp9.width;
622 vscale = (refh << 14) / dst->vp9.height;
623
624 regs->vp9.reg29_31[i].ref_hor_scale = hscale;
625 regs->vp9.reg29_31[i].ref_ver_scale = vscale;
626 }
627
628 addr = vb2_dma_contig_plane_dma_addr(&dst->base.vb.vb2_buf, 0);
629 regs->common.decout_base = addr;
630 addr = vb2_dma_contig_plane_dma_addr(&run->base.bufs.src->vb2_buf, 0);
631 regs->common.strm_rlc_base = addr;
632
633 regs->common.cabactbl_base = vp9_ctx->priv_tbl.dma +
634 offsetof(struct rkvdec_vp9_priv_tbl, probs);
635
636 regs->vp9.count_base = vp9_ctx->count_tbl.dma;
637
638 regs->vp9.segidlast_base = vp9_ctx->priv_tbl.dma +
639 offsetof(struct rkvdec_vp9_priv_tbl, segmap) +
640 (RKVDEC_VP9_MAX_SEGMAP_SIZE * (!vp9_ctx->cur.segmapid));
641
642 regs->vp9.segidcur_base = vp9_ctx->priv_tbl.dma +
643 offsetof(struct rkvdec_vp9_priv_tbl, segmap) +
644 (RKVDEC_VP9_MAX_SEGMAP_SIZE * vp9_ctx->cur.segmapid);
645
646 if (!intra_only &&
647 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
648 vp9_ctx->last.valid)
649 mv_ref = last;
650 else
651 mv_ref = dst;
652
653 regs->vp9.refcolmv_base = get_mv_base_addr(mv_ref);
654
655 regs->vp9.performance_cycle = ctx->decoded_fmt.fmt.pix_mp.width |
656 (ctx->decoded_fmt.fmt.pix_mp.height << 16);
657
658 regs->vp9.reg44.strmd_error_e = 0xe;
659
660 rkvdec_memcpy_toio(rkvdec->regs, regs,
661 MIN(sizeof(*regs), sizeof(u32) * rkvdec->variant->num_regs));
662 }
663
validate_dec_params(struct rkvdec_ctx * ctx,const struct v4l2_ctrl_vp9_frame * dec_params)664 static int validate_dec_params(struct rkvdec_ctx *ctx,
665 const struct v4l2_ctrl_vp9_frame *dec_params)
666 {
667 unsigned int aligned_width, aligned_height;
668
669 /* We only support profile 0. */
670 if (dec_params->profile != 0) {
671 dev_err(ctx->dev->dev, "unsupported profile %d\n",
672 dec_params->profile);
673 return -EINVAL;
674 }
675
676 aligned_width = round_up(dec_params->frame_width_minus_1 + 1, 64);
677 aligned_height = round_up(dec_params->frame_height_minus_1 + 1, 64);
678
679 /*
680 * Userspace should update the capture/decoded format when the
681 * resolution changes.
682 */
683 if (aligned_width != ctx->decoded_fmt.fmt.pix_mp.width ||
684 aligned_height != ctx->decoded_fmt.fmt.pix_mp.height) {
685 dev_err(ctx->dev->dev,
686 "unexpected bitstream resolution %dx%d\n",
687 dec_params->frame_width_minus_1 + 1,
688 dec_params->frame_height_minus_1 + 1);
689 return -EINVAL;
690 }
691
692 return 0;
693 }
694
rkvdec_vp9_run_preamble(struct rkvdec_ctx * ctx,struct rkvdec_vp9_run * run)695 static int rkvdec_vp9_run_preamble(struct rkvdec_ctx *ctx,
696 struct rkvdec_vp9_run *run)
697 {
698 const struct v4l2_ctrl_vp9_frame *dec_params;
699 const struct v4l2_ctrl_vp9_compressed_hdr *prob_updates;
700 struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
701 struct v4l2_ctrl *ctrl;
702 unsigned int fctx_idx;
703 int ret;
704
705 /* v4l2-specific stuff */
706 rkvdec_run_preamble(ctx, &run->base);
707
708 ctrl = v4l2_ctrl_find(&ctx->ctrl_hdl,
709 V4L2_CID_STATELESS_VP9_FRAME);
710 if (WARN_ON(!ctrl))
711 return -EINVAL;
712 dec_params = ctrl->p_cur.p;
713
714 ret = validate_dec_params(ctx, dec_params);
715 if (ret)
716 return ret;
717
718 run->decode_params = dec_params;
719
720 ctrl = v4l2_ctrl_find(&ctx->ctrl_hdl, V4L2_CID_STATELESS_VP9_COMPRESSED_HDR);
721 if (WARN_ON(!ctrl))
722 return -EINVAL;
723 prob_updates = ctrl->p_cur.p;
724 vp9_ctx->cur.tx_mode = prob_updates->tx_mode;
725
726 /*
727 * vp9 stuff
728 *
729 * by this point the userspace has done all parts of 6.2 uncompressed_header()
730 * except this fragment:
731 * if ( FrameIsIntra || error_resilient_mode ) {
732 * setup_past_independence ( )
733 * if ( frame_type == KEY_FRAME || error_resilient_mode == 1 ||
734 * reset_frame_context == 3 ) {
735 * for ( i = 0; i < 4; i ++ ) {
736 * save_probs( i )
737 * }
738 * } else if ( reset_frame_context == 2 ) {
739 * save_probs( frame_context_idx )
740 * }
741 * frame_context_idx = 0
742 * }
743 */
744 fctx_idx = v4l2_vp9_reset_frame_ctx(dec_params, vp9_ctx->frame_context);
745 vp9_ctx->cur.frame_context_idx = fctx_idx;
746
747 /* 6.1 frame(sz): load_probs() and load_probs2() */
748 vp9_ctx->probability_tables = vp9_ctx->frame_context[fctx_idx];
749
750 /*
751 * The userspace has also performed 6.3 compressed_header(), but handling the
752 * probs in a special way. All probs which need updating, except MV-related,
753 * have been read from the bitstream and translated through inv_map_table[],
754 * but no 6.3.6 inv_recenter_nonneg(v, m) has been performed. The values passed
755 * by userspace are either translated values (there are no 0 values in
756 * inv_map_table[]), or zero to indicate no update. All MV-related probs which need
757 * updating have been read from the bitstream and (mv_prob << 1) | 1 has been
758 * performed. The values passed by userspace are either new values
759 * to replace old ones (the above mentioned shift and bitwise or never result in
760 * a zero) or zero to indicate no update.
761 * fw_update_probs() performs actual probs updates or leaves probs as-is
762 * for values for which a zero was passed from userspace.
763 */
764 v4l2_vp9_fw_update_probs(&vp9_ctx->probability_tables, prob_updates, dec_params);
765
766 return 0;
767 }
768
rkvdec_vp9_run(struct rkvdec_ctx * ctx)769 static int rkvdec_vp9_run(struct rkvdec_ctx *ctx)
770 {
771 struct rkvdec_dev *rkvdec = ctx->dev;
772 struct rkvdec_vp9_run run = { };
773 int ret;
774
775 ret = rkvdec_vp9_run_preamble(ctx, &run);
776 if (ret) {
777 rkvdec_run_postamble(ctx, &run.base);
778 return ret;
779 }
780
781 /* Prepare probs. */
782 init_probs(ctx, &run);
783
784 /* Configure hardware registers. */
785 config_registers(ctx, &run);
786
787 rkvdec_run_postamble(ctx, &run.base);
788
789 schedule_delayed_work(&rkvdec->watchdog_work, msecs_to_jiffies(2000));
790
791 writel(1, rkvdec->regs + RKVDEC_REG_PREF_LUMA_CACHE_COMMAND);
792 writel(1, rkvdec->regs + RKVDEC_REG_PREF_CHR_CACHE_COMMAND);
793
794 if (rkvdec->variant->quirks & RKVDEC_QUIRK_DISABLE_QOS)
795 rkvdec_quirks_disable_qos(ctx);
796
797 /* Start decoding! */
798 writel(RKVDEC_INTERRUPT_DEC_E | RKVDEC_CONFIG_DEC_CLK_GATE_E |
799 RKVDEC_TIMEOUT_E | RKVDEC_BUF_EMPTY_E,
800 rkvdec->regs + RKVDEC_REG_INTERRUPT);
801
802 return 0;
803 }
804
805 #define copy_tx_and_skip(p1, p2) \
806 do { \
807 memcpy((p1)->tx8, (p2)->tx8, sizeof((p1)->tx8)); \
808 memcpy((p1)->tx16, (p2)->tx16, sizeof((p1)->tx16)); \
809 memcpy((p1)->tx32, (p2)->tx32, sizeof((p1)->tx32)); \
810 memcpy((p1)->skip, (p2)->skip, sizeof((p1)->skip)); \
811 } while (0)
812
rkvdec_vp9_done(struct rkvdec_ctx * ctx,struct vb2_v4l2_buffer * src_buf,struct vb2_v4l2_buffer * dst_buf,enum vb2_buffer_state result)813 static void rkvdec_vp9_done(struct rkvdec_ctx *ctx,
814 struct vb2_v4l2_buffer *src_buf,
815 struct vb2_v4l2_buffer *dst_buf,
816 enum vb2_buffer_state result)
817 {
818 struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
819 unsigned int fctx_idx;
820
821 /* v4l2-specific stuff */
822 if (result == VB2_BUF_STATE_ERROR)
823 goto out_update_last;
824
825 /*
826 * vp9 stuff
827 *
828 * 6.1.2 refresh_probs()
829 *
830 * In the spec a complementary condition goes last in 6.1.2 refresh_probs(),
831 * but it makes no sense to perform all the activities from the first "if"
832 * there if we actually are not refreshing the frame context. On top of that,
833 * because of 6.2 uncompressed_header() whenever error_resilient_mode == 1,
834 * refresh_frame_context == 0. Consequently, if we don't jump to out_update_last
835 * it means error_resilient_mode must be 0.
836 */
837 if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX))
838 goto out_update_last;
839
840 fctx_idx = vp9_ctx->cur.frame_context_idx;
841
842 if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE)) {
843 /* error_resilient_mode == 0 && frame_parallel_decoding_mode == 0 */
844 struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
845 bool frame_is_intra = vp9_ctx->cur.flags &
846 (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY);
847 struct tx_and_skip {
848 u8 tx8[2][1];
849 u8 tx16[2][2];
850 u8 tx32[2][3];
851 u8 skip[3];
852 } _tx_skip, *tx_skip = &_tx_skip;
853 struct v4l2_vp9_frame_symbol_counts *counts;
854
855 /* buffer the forward-updated TX and skip probs */
856 if (frame_is_intra)
857 copy_tx_and_skip(tx_skip, probs);
858
859 /* 6.1.2 refresh_probs(): load_probs() and load_probs2() */
860 *probs = vp9_ctx->frame_context[fctx_idx];
861
862 /* if FrameIsIntra then undo the effect of load_probs2() */
863 if (frame_is_intra)
864 copy_tx_and_skip(probs, tx_skip);
865
866 counts = frame_is_intra ? &vp9_ctx->intra_cnts : &vp9_ctx->inter_cnts;
867 v4l2_vp9_adapt_coef_probs(probs, counts,
868 !vp9_ctx->last.valid ||
869 vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME,
870 frame_is_intra);
871 if (!frame_is_intra) {
872 const struct rkvdec_vp9_inter_frame_symbol_counts *inter_cnts;
873 u32 classes[2][11];
874 int i;
875
876 inter_cnts = vp9_ctx->count_tbl.cpu;
877 for (i = 0; i < ARRAY_SIZE(classes); ++i)
878 memcpy(classes[i], inter_cnts->classes[i], sizeof(classes[0]));
879 counts->classes = &classes;
880
881 /* load_probs2() already done */
882 v4l2_vp9_adapt_noncoef_probs(&vp9_ctx->probability_tables, counts,
883 vp9_ctx->cur.reference_mode,
884 vp9_ctx->cur.interpolation_filter,
885 vp9_ctx->cur.tx_mode, vp9_ctx->cur.flags);
886 }
887 }
888
889 /* 6.1.2 refresh_probs(): save_probs(fctx_idx) */
890 vp9_ctx->frame_context[fctx_idx] = vp9_ctx->probability_tables;
891
892 out_update_last:
893 update_ctx_last_info(vp9_ctx);
894 }
895
rkvdec_init_v4l2_vp9_count_tbl(struct rkvdec_ctx * ctx)896 static void rkvdec_init_v4l2_vp9_count_tbl(struct rkvdec_ctx *ctx)
897 {
898 struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
899 struct rkvdec_vp9_intra_frame_symbol_counts *intra_cnts = vp9_ctx->count_tbl.cpu;
900 struct rkvdec_vp9_inter_frame_symbol_counts *inter_cnts = vp9_ctx->count_tbl.cpu;
901 int i, j, k, l, m;
902
903 vp9_ctx->inter_cnts.partition = &inter_cnts->partition;
904 vp9_ctx->inter_cnts.skip = &inter_cnts->skip;
905 vp9_ctx->inter_cnts.intra_inter = &inter_cnts->inter;
906 vp9_ctx->inter_cnts.tx32p = &inter_cnts->tx32p;
907 vp9_ctx->inter_cnts.tx16p = &inter_cnts->tx16p;
908 vp9_ctx->inter_cnts.tx8p = &inter_cnts->tx8p;
909
910 vp9_ctx->intra_cnts.partition = (u32 (*)[16][4])(&intra_cnts->partition);
911 vp9_ctx->intra_cnts.skip = &intra_cnts->skip;
912 vp9_ctx->intra_cnts.intra_inter = &intra_cnts->intra;
913 vp9_ctx->intra_cnts.tx32p = &intra_cnts->tx32p;
914 vp9_ctx->intra_cnts.tx16p = &intra_cnts->tx16p;
915 vp9_ctx->intra_cnts.tx8p = &intra_cnts->tx8p;
916
917 vp9_ctx->inter_cnts.y_mode = &inter_cnts->y_mode;
918 vp9_ctx->inter_cnts.uv_mode = &inter_cnts->uv_mode;
919 vp9_ctx->inter_cnts.comp = &inter_cnts->comp;
920 vp9_ctx->inter_cnts.comp_ref = &inter_cnts->comp_ref;
921 vp9_ctx->inter_cnts.single_ref = &inter_cnts->single_ref;
922 vp9_ctx->inter_cnts.mv_mode = &inter_cnts->mv_mode;
923 vp9_ctx->inter_cnts.filter = &inter_cnts->filter;
924 vp9_ctx->inter_cnts.mv_joint = &inter_cnts->mv_joint;
925 vp9_ctx->inter_cnts.sign = &inter_cnts->sign;
926 /*
927 * rk hardware actually uses "u32 classes[2][11 + 1];"
928 * instead of "u32 classes[2][11];", so this must be explicitly
929 * copied into vp9_ctx->classes when passing the data to the
930 * vp9 library function
931 */
932 vp9_ctx->inter_cnts.class0 = &inter_cnts->class0;
933 vp9_ctx->inter_cnts.bits = &inter_cnts->bits;
934 vp9_ctx->inter_cnts.class0_fp = &inter_cnts->class0_fp;
935 vp9_ctx->inter_cnts.fp = &inter_cnts->fp;
936 vp9_ctx->inter_cnts.class0_hp = &inter_cnts->class0_hp;
937 vp9_ctx->inter_cnts.hp = &inter_cnts->hp;
938
939 #define INNERMOST_LOOP \
940 do { \
941 for (m = 0; m < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0][0][0]); ++m) {\
942 vp9_ctx->inter_cnts.coeff[i][j][k][l][m] = \
943 &inter_cnts->ref_cnt[k][i][j][l][m].coeff; \
944 vp9_ctx->inter_cnts.eob[i][j][k][l][m][0] = \
945 &inter_cnts->ref_cnt[k][i][j][l][m].eob[0]; \
946 vp9_ctx->inter_cnts.eob[i][j][k][l][m][1] = \
947 &inter_cnts->ref_cnt[k][i][j][l][m].eob[1]; \
948 \
949 vp9_ctx->intra_cnts.coeff[i][j][k][l][m] = \
950 &intra_cnts->ref_cnt[k][i][j][l][m].coeff; \
951 vp9_ctx->intra_cnts.eob[i][j][k][l][m][0] = \
952 &intra_cnts->ref_cnt[k][i][j][l][m].eob[0]; \
953 vp9_ctx->intra_cnts.eob[i][j][k][l][m][1] = \
954 &intra_cnts->ref_cnt[k][i][j][l][m].eob[1]; \
955 } \
956 } while (0)
957
958 for (i = 0; i < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff); ++i)
959 for (j = 0; j < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0]); ++j)
960 for (k = 0; k < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0]); ++k)
961 for (l = 0; l < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0][0]); ++l)
962 INNERMOST_LOOP;
963 #undef INNERMOST_LOOP
964 }
965
rkvdec_vp9_start(struct rkvdec_ctx * ctx)966 static int rkvdec_vp9_start(struct rkvdec_ctx *ctx)
967 {
968 struct rkvdec_dev *rkvdec = ctx->dev;
969 struct rkvdec_vp9_priv_tbl *priv_tbl;
970 struct rkvdec_vp9_ctx *vp9_ctx;
971 unsigned char *count_tbl;
972 int ret;
973
974 vp9_ctx = kzalloc_obj(*vp9_ctx);
975 if (!vp9_ctx)
976 return -ENOMEM;
977
978 ctx->priv = vp9_ctx;
979
980 BUILD_BUG_ON(sizeof(priv_tbl->probs) % 16); /* ensure probs size is 128-bit aligned */
981 priv_tbl = dma_alloc_coherent(rkvdec->dev, sizeof(*priv_tbl),
982 &vp9_ctx->priv_tbl.dma, GFP_KERNEL);
983 if (!priv_tbl) {
984 ret = -ENOMEM;
985 goto err_free_ctx;
986 }
987
988 vp9_ctx->priv_tbl.size = sizeof(*priv_tbl);
989 vp9_ctx->priv_tbl.cpu = priv_tbl;
990
991 count_tbl = dma_alloc_coherent(rkvdec->dev, RKVDEC_VP9_COUNT_SIZE,
992 &vp9_ctx->count_tbl.dma, GFP_KERNEL);
993 if (!count_tbl) {
994 ret = -ENOMEM;
995 goto err_free_priv_tbl;
996 }
997
998 vp9_ctx->count_tbl.size = RKVDEC_VP9_COUNT_SIZE;
999 vp9_ctx->count_tbl.cpu = count_tbl;
1000 rkvdec_init_v4l2_vp9_count_tbl(ctx);
1001
1002 return 0;
1003
1004 err_free_priv_tbl:
1005 dma_free_coherent(rkvdec->dev, vp9_ctx->priv_tbl.size,
1006 vp9_ctx->priv_tbl.cpu, vp9_ctx->priv_tbl.dma);
1007
1008 err_free_ctx:
1009 kfree(vp9_ctx);
1010 return ret;
1011 }
1012
rkvdec_vp9_stop(struct rkvdec_ctx * ctx)1013 static void rkvdec_vp9_stop(struct rkvdec_ctx *ctx)
1014 {
1015 struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
1016 struct rkvdec_dev *rkvdec = ctx->dev;
1017
1018 dma_free_coherent(rkvdec->dev, vp9_ctx->count_tbl.size,
1019 vp9_ctx->count_tbl.cpu, vp9_ctx->count_tbl.dma);
1020 dma_free_coherent(rkvdec->dev, vp9_ctx->priv_tbl.size,
1021 vp9_ctx->priv_tbl.cpu, vp9_ctx->priv_tbl.dma);
1022 kfree(vp9_ctx);
1023 }
1024
rkvdec_vp9_adjust_fmt(struct rkvdec_ctx * ctx,struct v4l2_format * f)1025 static int rkvdec_vp9_adjust_fmt(struct rkvdec_ctx *ctx,
1026 struct v4l2_format *f)
1027 {
1028 struct v4l2_pix_format_mplane *fmt = &f->fmt.pix_mp;
1029
1030 fmt->num_planes = 1;
1031 if (!fmt->plane_fmt[0].sizeimage)
1032 fmt->plane_fmt[0].sizeimage = fmt->width * fmt->height * 2;
1033 return 0;
1034 }
1035
1036 const struct rkvdec_coded_fmt_ops rkvdec_vp9_fmt_ops = {
1037 .adjust_fmt = rkvdec_vp9_adjust_fmt,
1038 .start = rkvdec_vp9_start,
1039 .stop = rkvdec_vp9_stop,
1040 .run = rkvdec_vp9_run,
1041 .done = rkvdec_vp9_done,
1042 };
1043