// SPDX-License-Identifier: GPL-2.0
/*
 * Hantro VP9 codec driver
 *
 * Copyright (C) 2021 Collabora Ltd.
 */
#include "media/videobuf2-core.h"
#include "media/videobuf2-dma-contig.h"
#include "media/videobuf2-v4l2.h"
#include <linux/kernel.h>
#include <linux/vmalloc.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-vp9.h>

#include "hantro.h"
#include "hantro_vp9.h"
#include "hantro_g2_regs.h"

enum hantro_ref_frames {
	INTRA_FRAME = 0,
	LAST_FRAME = 1,
	GOLDEN_FRAME = 2,
	ALTREF_FRAME = 3,
	MAX_REF_FRAMES = 4
};

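/*
 * Fetch the VP9 frame and compressed-header controls, pick the frame
 * context to decode against and apply the forward probability updates
 * that userspace parsed from the compressed header.
 */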
static int start_prepare_run(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame **dec_params)
{
	const struct v4l2_ctrl_vp9_compressed_hdr *prob_updates;
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	struct v4l2_ctrl *ctrl;
	unsigned int fctx_idx;

	/* v4l2-specific stuff */
	hantro_start_prepare_run(ctx);

	ctrl = v4l2_ctrl_find(&ctx->ctrl_handler, V4L2_CID_STATELESS_VP9_FRAME);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	*dec_params = ctrl->p_cur.p;

	ctrl = v4l2_ctrl_find(&ctx->ctrl_handler, V4L2_CID_STATELESS_VP9_COMPRESSED_HDR);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	prob_updates = ctrl->p_cur.p;
	vp9_ctx->cur.tx_mode = prob_updates->tx_mode;

	/*
	 * vp9 stuff
	 *
	 * by this point the userspace has done all parts of 6.2 uncompressed_header()
	 * except this fragment:
	 * if ( FrameIsIntra || error_resilient_mode ) {
	 *	setup_past_independence ( )
	 *	if ( frame_type == KEY_FRAME || error_resilient_mode == 1 ||
	 *	     reset_frame_context == 3 ) {
	 *		for ( i = 0; i < 4; i ++ ) {
	 *			save_probs( i )
	 *		}
	 *	} else if ( reset_frame_context == 2 ) {
	 *		save_probs( frame_context_idx )
	 *	}
	 *	frame_context_idx = 0
	 * }
	 */
	fctx_idx = v4l2_vp9_reset_frame_ctx(*dec_params, vp9_ctx->frame_context);
	vp9_ctx->cur.frame_context_idx = fctx_idx;

	/* 6.1 frame(sz): load_probs() and load_probs2() */
	vp9_ctx->probability_tables = vp9_ctx->frame_context[fctx_idx];

	/*
	 * The userspace has also performed 6.3 compressed_header(), but handling the
	 * probs in a special way. All probs which need updating, except MV-related,
	 * have been read from the bitstream and translated through inv_map_table[],
	 * but no 6.3.6 inv_recenter_nonneg(v, m) has been performed. The values passed
	 * by userspace are either translated values (there are no 0 values in
	 * inv_map_table[]), or zero to indicate no update. All MV-related probs which need
	 * updating have been read from the bitstream and (mv_prob << 1) | 1 has been
	 * performed. The values passed by userspace are either new values
	 * to replace old ones (the above mentioned shift and bitwise or never result in
	 * a zero) or zero to indicate no update.
	 * fw_update_probs() performs actual probs updates or leaves probs as-is
	 * for values for which a zero was passed from userspace.
	 */
	v4l2_vp9_fw_update_probs(&vp9_ctx->probability_tables, prob_updates, *dec_params);

	return 0;
}

static struct hantro_decoded_buffer *
get_ref_buf(struct hantro_ctx *ctx, struct vb2_v4l2_buffer *dst, u64 timestamp)
{
	struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;
	struct vb2_queue *cap_q = &m2m_ctx->cap_q_ctx.q;
	struct vb2_buffer *buf;

	/*
	 * If a ref is unused or invalid, the address of the current
	 * destination buffer is returned.
	 */
	buf = vb2_find_buffer(cap_q, timestamp);
	if (!buf)
		buf = &dst->vb2_buf;

	return vb2_to_hantro_decoded_buf(buf);
}

static void update_dec_buf_info(struct hantro_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	buf->vp9.width = dec_params->frame_width_minus_1 + 1;
	buf->vp9.height = dec_params->frame_height_minus_1 + 1;
	buf->vp9.bit_depth = dec_params->bit_depth;
}

static void update_ctx_cur_info(struct hantro_vp9_dec_hw_ctx *vp9_ctx,
				struct hantro_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	vp9_ctx->cur.valid = true;
	vp9_ctx->cur.reference_mode = dec_params->reference_mode;
	vp9_ctx->cur.interpolation_filter = dec_params->interpolation_filter;
	vp9_ctx->cur.flags = dec_params->flags;
	vp9_ctx->cur.timestamp = buf->base.vb.vb2_buf.timestamp;
}

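/*
 * Program the destination buffer. Luma, chroma and motion-vector data
 * share a single allocation; the chroma and MV offsets are cached in the
 * buffer so it can later serve as a reference.
 */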
static void config_output(struct hantro_ctx *ctx,
			  struct hantro_decoded_buffer *dst,
			  const struct v4l2_ctrl_vp9_frame *dec_params)
{
	dma_addr_t luma_addr, chroma_addr, mv_addr;

	hantro_reg_write(ctx->dev, &g2_out_dis, 0);
	if (!ctx->dev->variant->legacy_regs)
		hantro_reg_write(ctx->dev, &g2_output_format, 0);

	luma_addr = hantro_get_dec_buf_addr(ctx, &dst->base.vb.vb2_buf);
	hantro_write_addr(ctx->dev, G2_OUT_LUMA_ADDR, luma_addr);

	chroma_addr = luma_addr + hantro_g2_chroma_offset(ctx);
	hantro_write_addr(ctx->dev, G2_OUT_CHROMA_ADDR, chroma_addr);
	dst->vp9.chroma_offset = hantro_g2_chroma_offset(ctx);

	mv_addr = luma_addr + hantro_g2_motion_vectors_offset(ctx);
	hantro_write_addr(ctx->dev, G2_OUT_MV_ADDR, mv_addr);
	dst->vp9.mv_offset = hantro_g2_motion_vectors_offset(ctx);
}

struct hantro_vp9_ref_reg {
	const struct hantro_reg width;
	const struct hantro_reg height;
	const struct hantro_reg hor_scale;
	const struct hantro_reg ver_scale;
	u32 y_base;
	u32 c_base;
};

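/*
 * Program one reference frame: dimensions, luma/chroma base addresses
 * and the ref-to-destination size ratios, which the hardware appears to
 * take as fixed-point values scaled by 2^14.
 */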
static void config_ref(struct hantro_ctx *ctx,
		       struct hantro_decoded_buffer *dst,
		       const struct hantro_vp9_ref_reg *ref_reg,
		       const struct v4l2_ctrl_vp9_frame *dec_params,
		       u64 ref_ts)
{
	struct hantro_decoded_buffer *buf;
	dma_addr_t luma_addr, chroma_addr;
	u32 refw, refh;

	buf = get_ref_buf(ctx, &dst->base.vb, ref_ts);
	refw = buf->vp9.width;
	refh = buf->vp9.height;

	hantro_reg_write(ctx->dev, &ref_reg->width, refw);
	hantro_reg_write(ctx->dev, &ref_reg->height, refh);

	hantro_reg_write(ctx->dev, &ref_reg->hor_scale, (refw << 14) / dst->vp9.width);
	hantro_reg_write(ctx->dev, &ref_reg->ver_scale, (refh << 14) / dst->vp9.height);

	luma_addr = hantro_get_dec_buf_addr(ctx, &buf->base.vb.vb2_buf);
	hantro_write_addr(ctx->dev, ref_reg->y_base, luma_addr);

	chroma_addr = luma_addr + buf->vp9.chroma_offset;
	hantro_write_addr(ctx->dev, ref_reg->c_base, chroma_addr);
}

static void config_ref_registers(struct hantro_ctx *ctx,
				 const struct v4l2_ctrl_vp9_frame *dec_params,
				 struct hantro_decoded_buffer *dst,
				 struct hantro_decoded_buffer *mv_ref)
{
	static const struct hantro_vp9_ref_reg ref_regs[] = {
		{
			/* Last */
			.width = vp9_lref_width,
			.height = vp9_lref_height,
			.hor_scale = vp9_lref_hor_scale,
			.ver_scale = vp9_lref_ver_scale,
			.y_base = G2_REF_LUMA_ADDR(0),
			.c_base = G2_REF_CHROMA_ADDR(0),
		}, {
			/* Golden */
			.width = vp9_gref_width,
			.height = vp9_gref_height,
			.hor_scale = vp9_gref_hor_scale,
			.ver_scale = vp9_gref_ver_scale,
			.y_base = G2_REF_LUMA_ADDR(4),
			.c_base = G2_REF_CHROMA_ADDR(4),
		}, {
			/* Altref */
			.width = vp9_aref_width,
			.height = vp9_aref_height,
			.hor_scale = vp9_aref_hor_scale,
			.ver_scale = vp9_aref_ver_scale,
			.y_base = G2_REF_LUMA_ADDR(5),
			.c_base = G2_REF_CHROMA_ADDR(5),
		},
	};
	dma_addr_t mv_addr;

	config_ref(ctx, dst, &ref_regs[0], dec_params, dec_params->last_frame_ts);
	config_ref(ctx, dst, &ref_regs[1], dec_params, dec_params->golden_frame_ts);
	config_ref(ctx, dst, &ref_regs[2], dec_params, dec_params->alt_frame_ts);

	mv_addr = hantro_get_dec_buf_addr(ctx, &mv_ref->base.vb.vb2_buf) +
		  mv_ref->vp9.mv_offset;
	hantro_write_addr(ctx->dev, G2_REF_MV_ADDR(0), mv_addr);

	hantro_reg_write(ctx->dev, &vp9_last_sign_bias,
			 dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_LAST ? 1 : 0);

	hantro_reg_write(ctx->dev, &vp9_gref_sign_bias,
			 dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_GOLDEN ? 1 : 0);

	hantro_reg_write(ctx->dev, &vp9_aref_sign_bias,
			 dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_ALT ? 1 : 0);
}

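/*
 * Spread sbs superblocks as evenly as possible across a row/column of
 * tiles: entry i receives i * sbs / tiles - (i - 1) * sbs / tiles
 * superblocks, matching the spec's tile boundary math since the tile
 * count is a power of two.
 */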
static void recompute_tile_info(unsigned short *tile_info, unsigned int tiles, unsigned int sbs)
{
	int i;
	unsigned int accumulated = 0;
	unsigned int next_accumulated;

	for (i = 1; i <= tiles; ++i) {
		next_accumulated = i * sbs / tiles;
		*tile_info++ = next_accumulated - accumulated;
		accumulated = next_accumulated;
	}
}

static void
recompute_tile_rc_info(struct hantro_ctx *ctx,
		       unsigned int tile_r, unsigned int tile_c,
		       unsigned int sbs_r, unsigned int sbs_c)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;

	recompute_tile_info(vp9_ctx->tile_r_info, tile_r, sbs_r);
	recompute_tile_info(vp9_ctx->tile_c_info, tile_c, sbs_c);

	vp9_ctx->last_tile_r = tile_r;
	vp9_ctx->last_tile_c = tile_c;
	vp9_ctx->last_sbs_r = sbs_r;
	vp9_ctx->last_sbs_c = sbs_c;
}

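/*
 * When there are more tile rows than superblock rows, the first one or
 * two tile-row entries computed above may be zero-sized; report how many
 * of them fill_tile_info() should skip (their sizes get folded into the
 * first row that is actually emitted).
 */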
static inline unsigned int first_tile_row(unsigned int tile_r, unsigned int sbs_r)
{
	if (tile_r == sbs_r + 1)
		return 1;

	if (tile_r == sbs_r + 2)
		return 2;

	return 0;
}

static void
fill_tile_info(struct hantro_ctx *ctx,
	       unsigned int tile_r, unsigned int tile_c,
	       unsigned int sbs_r, unsigned int sbs_c,
	       unsigned short *tile_mem)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	unsigned int i, j;
	bool first = true;

	for (i = first_tile_row(tile_r, sbs_r); i < tile_r; ++i) {
		unsigned short r_info = vp9_ctx->tile_r_info[i];

		if (first) {
			if (i > 0)
				r_info += vp9_ctx->tile_r_info[0];
			if (i == 2)
				r_info += vp9_ctx->tile_r_info[1];
			first = false;
		}
		for (j = 0; j < tile_c; ++j) {
			*tile_mem++ = vp9_ctx->tile_c_info[j];
			*tile_mem++ = r_info;
		}
	}
}

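/*
 * Fill the tile size table the hardware fetches from G2_TILE_SIZES_ADDR:
 * one (width, height) pair in superblock units per tile. The row/column
 * split is only recomputed when the tile layout or picture size changed.
 */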
static void
config_tiles(struct hantro_ctx *ctx,
	     const struct v4l2_ctrl_vp9_frame *dec_params,
	     struct hantro_decoded_buffer *dst)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	struct hantro_aux_buf *misc = &vp9_ctx->misc;
	struct hantro_aux_buf *tile_edge = &vp9_ctx->tile_edge;
	dma_addr_t addr;
	unsigned short *tile_mem;
	unsigned int rows, cols;

	addr = misc->dma + vp9_ctx->tile_info_offset;
	hantro_write_addr(ctx->dev, G2_TILE_SIZES_ADDR, addr);

	tile_mem = misc->cpu + vp9_ctx->tile_info_offset;
	if (dec_params->tile_cols_log2 || dec_params->tile_rows_log2) {
		unsigned int tile_r = (1 << dec_params->tile_rows_log2);
		unsigned int tile_c = (1 << dec_params->tile_cols_log2);
		unsigned int sbs_r = hantro_vp9_num_sbs(dst->vp9.height);
		unsigned int sbs_c = hantro_vp9_num_sbs(dst->vp9.width);

		if (tile_r != vp9_ctx->last_tile_r || tile_c != vp9_ctx->last_tile_c ||
		    sbs_r != vp9_ctx->last_sbs_r || sbs_c != vp9_ctx->last_sbs_c)
			recompute_tile_rc_info(ctx, tile_r, tile_c, sbs_r, sbs_c);

		fill_tile_info(ctx, tile_r, tile_c, sbs_r, sbs_c, tile_mem);

		cols = tile_c;
		rows = tile_r;
		hantro_reg_write(ctx->dev, &g2_tile_e, 1);
	} else {
		tile_mem[0] = hantro_vp9_num_sbs(dst->vp9.width);
		tile_mem[1] = hantro_vp9_num_sbs(dst->vp9.height);

		cols = 1;
		rows = 1;
		hantro_reg_write(ctx->dev, &g2_tile_e, 0);
	}

	if (ctx->dev->variant->legacy_regs) {
		hantro_reg_write(ctx->dev, &g2_num_tile_cols_old, cols);
		hantro_reg_write(ctx->dev, &g2_num_tile_rows_old, rows);
	} else {
		hantro_reg_write(ctx->dev, &g2_num_tile_cols, cols);
		hantro_reg_write(ctx->dev, &g2_num_tile_rows, rows);
	}

	/* provide aux buffers even if no tiles are used */
	addr = tile_edge->dma;
	hantro_write_addr(ctx->dev, G2_TILE_FILTER_ADDR, addr);

	addr = tile_edge->dma + vp9_ctx->bsd_ctrl_offset;
	hantro_write_addr(ctx->dev, G2_TILE_BSD_ADDR, addr);
}

static void
update_feat_and_flag(struct hantro_vp9_dec_hw_ctx *vp9_ctx,
		     const struct v4l2_vp9_segmentation *seg,
		     unsigned int feature,
		     unsigned int segid)
{
	u8 mask = V4L2_VP9_SEGMENT_FEATURE_ENABLED(feature);

	vp9_ctx->feature_data[segid][feature] = seg->feature_data[segid][feature];
	vp9_ctx->feature_enabled[segid] &= ~mask;
	vp9_ctx->feature_enabled[segid] |= (seg->feature_enabled[segid] & mask);
}

static inline s16 clip3(s16 x, s16 y, s16 z)
{
	return (z < x) ? x : (z > y) ? y : z;
}

static s16 feat_val_clip3(s16 feat_val, s16 feature_data, bool absolute, u8 clip)
{
	if (absolute)
		return feature_data;

	/* clamp to the feature's valid range: 255 for ALT_Q, 63 for ALT_L */
	return clip3(0, clip, feat_val + feature_data);
}

static void config_segment(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	const struct v4l2_vp9_segmentation *seg;
	s16 feat_val;
	unsigned char feat_id;
	unsigned int segid;
	bool segment_enabled, absolute, update_data;

	static const struct hantro_reg seg_regs[8][V4L2_VP9_SEG_LVL_MAX] = {
		{ vp9_quant_seg0, vp9_filt_level_seg0, vp9_refpic_seg0, vp9_skip_seg0 },
		{ vp9_quant_seg1, vp9_filt_level_seg1, vp9_refpic_seg1, vp9_skip_seg1 },
		{ vp9_quant_seg2, vp9_filt_level_seg2, vp9_refpic_seg2, vp9_skip_seg2 },
		{ vp9_quant_seg3, vp9_filt_level_seg3, vp9_refpic_seg3, vp9_skip_seg3 },
		{ vp9_quant_seg4, vp9_filt_level_seg4, vp9_refpic_seg4, vp9_skip_seg4 },
		{ vp9_quant_seg5, vp9_filt_level_seg5, vp9_refpic_seg5, vp9_skip_seg5 },
		{ vp9_quant_seg6, vp9_filt_level_seg6, vp9_refpic_seg6, vp9_skip_seg6 },
		{ vp9_quant_seg7, vp9_filt_level_seg7, vp9_refpic_seg7, vp9_skip_seg7 },
	};

	segment_enabled = !!(dec_params->seg.flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED);
	hantro_reg_write(ctx->dev, &vp9_segment_e, segment_enabled);
	hantro_reg_write(ctx->dev, &vp9_segment_upd_e,
			 !!(dec_params->seg.flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP));
	hantro_reg_write(ctx->dev, &vp9_segment_temp_upd_e,
			 !!(dec_params->seg.flags & V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE));

	seg = &dec_params->seg;
	absolute = !!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE);
	update_data = !!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA);

	for (segid = 0; segid < 8; ++segid) {
		/* Quantizer segment feature */
		feat_id = V4L2_VP9_SEG_LVL_ALT_Q;
		feat_val = dec_params->quant.base_q_idx;
		if (segment_enabled) {
			if (update_data)
				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
			if (v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled, feat_id, segid))
				feat_val = feat_val_clip3(feat_val,
							  vp9_ctx->feature_data[segid][feat_id],
							  absolute, 255);
		}
		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);

		/* Loop filter segment feature */
		feat_id = V4L2_VP9_SEG_LVL_ALT_L;
		feat_val = dec_params->lf.level;
		if (segment_enabled) {
			if (update_data)
				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
			if (v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled, feat_id, segid))
				feat_val = feat_val_clip3(feat_val,
							  vp9_ctx->feature_data[segid][feat_id],
							  absolute, 63);
		}
		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);

		/* Reference frame segment feature */
		feat_id = V4L2_VP9_SEG_LVL_REF_FRAME;
		feat_val = 0;
		if (segment_enabled) {
			if (update_data)
				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
			if (!(dec_params->flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME) &&
			    v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled, feat_id, segid))
				feat_val = vp9_ctx->feature_data[segid][feat_id] + 1;
		}
		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);

		/* Skip segment feature */
		feat_id = V4L2_VP9_SEG_LVL_SKIP;
		feat_val = 0;
		if (segment_enabled) {
			if (update_data)
				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
			feat_val = v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled,
							     feat_id, segid) ? 1 : 0;
		}
		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);
	}
}

static void config_loop_filter(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	bool d = dec_params->lf.flags & V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED;

	hantro_reg_write(ctx->dev, &vp9_filt_level, dec_params->lf.level);
	hantro_reg_write(ctx->dev, &g2_out_filtering_dis, dec_params->lf.level == 0);
	hantro_reg_write(ctx->dev, &vp9_filt_sharpness, dec_params->lf.sharpness);

	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_0, d ? dec_params->lf.ref_deltas[0] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_1, d ? dec_params->lf.ref_deltas[1] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_2, d ? dec_params->lf.ref_deltas[2] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_3, d ? dec_params->lf.ref_deltas[3] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_mb_adj_0, d ? dec_params->lf.mode_deltas[0] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_mb_adj_1, d ? dec_params->lf.mode_deltas[1] : 0);
}

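/*
 * The picture size is programmed twice: in 8-pixel coding-block units
 * (rounded up) and in 4x4 units of that rounded size.
 */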
static void config_picture_dimensions(struct hantro_ctx *ctx, struct hantro_decoded_buffer *dst)
{
	u32 pic_w_4x4, pic_h_4x4;

	hantro_reg_write(ctx->dev, &g2_pic_width_in_cbs, (dst->vp9.width + 7) / 8);
	hantro_reg_write(ctx->dev, &g2_pic_height_in_cbs, (dst->vp9.height + 7) / 8);
	pic_w_4x4 = roundup(dst->vp9.width, 8) >> 2;
	pic_h_4x4 = roundup(dst->vp9.height, 8) >> 2;
	hantro_reg_write(ctx->dev, &g2_pic_width_4x4, pic_w_4x4);
	hantro_reg_write(ctx->dev, &g2_pic_height_4x4, pic_h_4x4);
}

static void
config_bit_depth(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	if (ctx->dev->variant->legacy_regs) {
		hantro_reg_write(ctx->dev, &g2_bit_depth_y, dec_params->bit_depth);
		hantro_reg_write(ctx->dev, &g2_bit_depth_c, dec_params->bit_depth);
		hantro_reg_write(ctx->dev, &g2_pix_shift, 0);
	} else {
		hantro_reg_write(ctx->dev, &g2_bit_depth_y_minus8, dec_params->bit_depth - 8);
		hantro_reg_write(ctx->dev, &g2_bit_depth_c_minus8, dec_params->bit_depth - 8);
	}
}

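/*
 * The frame is lossless when the base quantizer index and all three
 * quantizer deltas are zero.
 */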
static inline bool is_lossless(const struct v4l2_vp9_quantization *quant)
{
	return quant->base_q_idx == 0 && quant->delta_q_uv_ac == 0 &&
	       quant->delta_q_uv_dc == 0 && quant->delta_q_y_dc == 0;
}

static void
config_quant(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	hantro_reg_write(ctx->dev, &vp9_qp_delta_y_dc, dec_params->quant.delta_q_y_dc);
	hantro_reg_write(ctx->dev, &vp9_qp_delta_ch_dc, dec_params->quant.delta_q_uv_dc);
	hantro_reg_write(ctx->dev, &vp9_qp_delta_ch_ac, dec_params->quant.delta_q_uv_ac);
	hantro_reg_write(ctx->dev, &vp9_lossless_e, is_lossless(&dec_params->quant));
}

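/*
 * Map the V4L2 interpolation filter IDs onto the G2 encoding; note that
 * EIGHTTAP and EIGHTTAP_SMOOTH trade places.
 */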
static u32
hantro_interp_filter_from_v4l2(unsigned int interpolation_filter)
{
	switch (interpolation_filter) {
	case V4L2_VP9_INTERP_FILTER_EIGHTTAP:
		return 0x1;
	case V4L2_VP9_INTERP_FILTER_EIGHTTAP_SMOOTH:
		return 0;
	case V4L2_VP9_INTERP_FILTER_EIGHTTAP_SHARP:
		return 0x2;
	case V4L2_VP9_INTERP_FILTER_BILINEAR:
		return 0x3;
	case V4L2_VP9_INTERP_FILTER_SWITCHABLE:
		return 0x4;
	}

	return 0;
}

static void
config_others(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params,
	      bool intra_only, bool resolution_change)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;

	hantro_reg_write(ctx->dev, &g2_idr_pic_e, intra_only);

	hantro_reg_write(ctx->dev, &vp9_transform_mode, vp9_ctx->cur.tx_mode);

	hantro_reg_write(ctx->dev, &vp9_mcomp_filt_type, intra_only ?
		0 : hantro_interp_filter_from_v4l2(dec_params->interpolation_filter));

	hantro_reg_write(ctx->dev, &vp9_high_prec_mv_e,
			 !!(dec_params->flags & V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV));

	hantro_reg_write(ctx->dev, &vp9_comp_pred_mode, dec_params->reference_mode);

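	/*
	 * Temporal MV prediction is usable only when neither the current
	 * nor the previous frame is intra, the previous frame was shown,
	 * the resolution did not change and error-resilient mode is off.
	 */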
	hantro_reg_write(ctx->dev, &g2_tempor_mvp_e,
			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME) &&
			 !(vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME) &&
			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_INTRA_ONLY) &&
			 !resolution_change &&
			 vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_SHOW_FRAME
	);

	hantro_reg_write(ctx->dev, &g2_write_mvs_e,
			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME));
}

static void
config_compound_reference(struct hantro_ctx *ctx,
			  const struct v4l2_ctrl_vp9_frame *dec_params)
{
	u32 comp_fixed_ref, comp_var_ref[2];
	bool last_ref_frame_sign_bias;
	bool golden_ref_frame_sign_bias;
	bool alt_ref_frame_sign_bias;
	bool comp_ref_allowed = false;

	comp_fixed_ref = 0;
	comp_var_ref[0] = 0;
	comp_var_ref[1] = 0;

	last_ref_frame_sign_bias = dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_LAST;
	golden_ref_frame_sign_bias = dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_GOLDEN;
	alt_ref_frame_sign_bias = dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_ALT;

	/* 6.3.12 Frame reference mode syntax */
	comp_ref_allowed |= golden_ref_frame_sign_bias != last_ref_frame_sign_bias;
	comp_ref_allowed |= alt_ref_frame_sign_bias != last_ref_frame_sign_bias;

	if (comp_ref_allowed) {
		if (last_ref_frame_sign_bias ==
		    golden_ref_frame_sign_bias) {
			comp_fixed_ref = ALTREF_FRAME;
			comp_var_ref[0] = LAST_FRAME;
			comp_var_ref[1] = GOLDEN_FRAME;
		} else if (last_ref_frame_sign_bias ==
			   alt_ref_frame_sign_bias) {
			comp_fixed_ref = GOLDEN_FRAME;
			comp_var_ref[0] = LAST_FRAME;
			comp_var_ref[1] = ALTREF_FRAME;
		} else {
			comp_fixed_ref = LAST_FRAME;
			comp_var_ref[0] = GOLDEN_FRAME;
			comp_var_ref[1] = ALTREF_FRAME;
		}
	}

	hantro_reg_write(ctx->dev, &vp9_comp_pred_fixed_ref, comp_fixed_ref);
	hantro_reg_write(ctx->dev, &vp9_comp_pred_var_ref0, comp_var_ref[0]);
	hantro_reg_write(ctx->dev, &vp9_comp_pred_var_ref1, comp_var_ref[1]);
}

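/*
 * The G2 probability layout pads each 3-byte coefficient probability set
 * to 4 bytes: copy the three probs and zero the padding byte.
 */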
#define INNER_LOOP \
do {									\
	for (m = 0; m < ARRAY_SIZE(adaptive->coef[0][0][0][0]); ++m) {	\
		memcpy(adaptive->coef[i][j][k][l][m],			\
		       probs->coef[i][j][k][l][m],			\
		       sizeof(probs->coef[i][j][k][l][m]));		\
									\
		adaptive->coef[i][j][k][l][m][3] = 0;			\
	}								\
} while (0)

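/*
 * Pack the fixed key-frame probabilities and the current adaptive
 * probability tables into the hardware layout and point the core at it.
 */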
static void config_probs(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	struct hantro_aux_buf *misc = &vp9_ctx->misc;
	struct hantro_g2_all_probs *all_probs = misc->cpu;
	struct hantro_g2_probs *adaptive;
	struct hantro_g2_mv_probs *mv;
	const struct v4l2_vp9_segmentation *seg = &dec_params->seg;
	const struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
	int i, j, k, l, m;

	for (i = 0; i < ARRAY_SIZE(all_probs->kf_y_mode_prob); ++i)
		for (j = 0; j < ARRAY_SIZE(all_probs->kf_y_mode_prob[0]); ++j) {
			memcpy(all_probs->kf_y_mode_prob[i][j],
			       v4l2_vp9_kf_y_mode_prob[i][j],
			       ARRAY_SIZE(all_probs->kf_y_mode_prob[i][j]));

			all_probs->kf_y_mode_prob_tail[i][j][0] =
				v4l2_vp9_kf_y_mode_prob[i][j][8];
		}

	memcpy(all_probs->mb_segment_tree_probs, seg->tree_probs,
	       sizeof(all_probs->mb_segment_tree_probs));

	memcpy(all_probs->segment_pred_probs, seg->pred_probs,
	       sizeof(all_probs->segment_pred_probs));

	for (i = 0; i < ARRAY_SIZE(all_probs->kf_uv_mode_prob); ++i) {
		memcpy(all_probs->kf_uv_mode_prob[i], v4l2_vp9_kf_uv_mode_prob[i],
		       ARRAY_SIZE(all_probs->kf_uv_mode_prob[i]));

		all_probs->kf_uv_mode_prob_tail[i][0] = v4l2_vp9_kf_uv_mode_prob[i][8];
	}

	adaptive = &all_probs->probs;

	for (i = 0; i < ARRAY_SIZE(adaptive->inter_mode); ++i) {
		memcpy(adaptive->inter_mode[i], probs->inter_mode[i],
		       ARRAY_SIZE(probs->inter_mode[i]));

		adaptive->inter_mode[i][3] = 0;
	}

	memcpy(adaptive->is_inter, probs->is_inter, sizeof(adaptive->is_inter));

	for (i = 0; i < ARRAY_SIZE(adaptive->uv_mode); ++i) {
		memcpy(adaptive->uv_mode[i], probs->uv_mode[i],
		       sizeof(adaptive->uv_mode[i]));
		adaptive->uv_mode_tail[i][0] = probs->uv_mode[i][8];
	}

	memcpy(adaptive->tx8, probs->tx8, sizeof(adaptive->tx8));
	memcpy(adaptive->tx16, probs->tx16, sizeof(adaptive->tx16));
	memcpy(adaptive->tx32, probs->tx32, sizeof(adaptive->tx32));

	for (i = 0; i < ARRAY_SIZE(adaptive->y_mode); ++i) {
		memcpy(adaptive->y_mode[i], probs->y_mode[i],
		       ARRAY_SIZE(adaptive->y_mode[i]));

		adaptive->y_mode_tail[i][0] = probs->y_mode[i][8];
	}

	for (i = 0; i < ARRAY_SIZE(adaptive->partition[0]); ++i) {
		memcpy(adaptive->partition[0][i], v4l2_vp9_kf_partition_probs[i],
		       sizeof(v4l2_vp9_kf_partition_probs[i]));

		adaptive->partition[0][i][3] = 0;
	}

	for (i = 0; i < ARRAY_SIZE(adaptive->partition[1]); ++i) {
		memcpy(adaptive->partition[1][i], probs->partition[i],
		       sizeof(probs->partition[i]));

		adaptive->partition[1][i][3] = 0;
	}

	memcpy(adaptive->interp_filter, probs->interp_filter,
	       sizeof(adaptive->interp_filter));

	memcpy(adaptive->comp_mode, probs->comp_mode, sizeof(adaptive->comp_mode));

	memcpy(adaptive->skip, probs->skip, sizeof(adaptive->skip));

	mv = &adaptive->mv;

	memcpy(mv->joint, probs->mv.joint, sizeof(mv->joint));
	memcpy(mv->sign, probs->mv.sign, sizeof(mv->sign));
	memcpy(mv->class0_bit, probs->mv.class0_bit, sizeof(mv->class0_bit));
	memcpy(mv->fr, probs->mv.fr, sizeof(mv->fr));
	memcpy(mv->class0_hp, probs->mv.class0_hp, sizeof(mv->class0_hp));
	memcpy(mv->hp, probs->mv.hp, sizeof(mv->hp));
	memcpy(mv->classes, probs->mv.classes, sizeof(mv->classes));
	memcpy(mv->class0_fr, probs->mv.class0_fr, sizeof(mv->class0_fr));
	memcpy(mv->bits, probs->mv.bits, sizeof(mv->bits));

	memcpy(adaptive->single_ref, probs->single_ref, sizeof(adaptive->single_ref));

	memcpy(adaptive->comp_ref, probs->comp_ref, sizeof(adaptive->comp_ref));

	for (i = 0; i < ARRAY_SIZE(adaptive->coef); ++i)
		for (j = 0; j < ARRAY_SIZE(adaptive->coef[0]); ++j)
			for (k = 0; k < ARRAY_SIZE(adaptive->coef[0][0]); ++k)
				for (l = 0; l < ARRAY_SIZE(adaptive->coef[0][0][0]); ++l)
					INNER_LOOP;

	hantro_write_addr(ctx->dev, G2_VP9_PROBS_ADDR, misc->dma);
}

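/*
 * Tell the hardware where to accumulate symbol counts; they are read
 * back in hantro_g2_vp9_dec_done() for backward probability adaptation.
 */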
static void config_counts(struct hantro_ctx *ctx)
{
	struct hantro_vp9_dec_hw_ctx *vp9_dec = &ctx->vp9_dec;
	struct hantro_aux_buf *misc = &vp9_dec->misc;
	dma_addr_t addr = misc->dma + vp9_dec->ctx_counters_offset;

	hantro_write_addr(ctx->dev, G2_VP9_CTX_COUNT_ADDR, addr);
}

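/*
 * Segment maps are double-buffered: the hardware reads the previous
 * frame's map while writing the new one, and the two buffers swap roles
 * whenever the map is updated.
 */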
static void config_seg_map(struct hantro_ctx *ctx,
			   const struct v4l2_ctrl_vp9_frame *dec_params,
			   bool intra_only, bool update_map)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	struct hantro_aux_buf *segment_map = &vp9_ctx->segment_map;
	dma_addr_t addr;

	if (intra_only ||
	    (dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT)) {
		memset(segment_map->cpu, 0, segment_map->size);
		memset(vp9_ctx->feature_data, 0, sizeof(vp9_ctx->feature_data));
		memset(vp9_ctx->feature_enabled, 0, sizeof(vp9_ctx->feature_enabled));
	}

	addr = segment_map->dma + vp9_ctx->active_segment * vp9_ctx->segment_map_size;
	hantro_write_addr(ctx->dev, G2_VP9_SEGMENT_READ_ADDR, addr);

	addr = segment_map->dma + (1 - vp9_ctx->active_segment) * vp9_ctx->segment_map_size;
	hantro_write_addr(ctx->dev, G2_VP9_SEGMENT_WRITE_ADDR, addr);

	if (update_map)
		vp9_ctx->active_segment = 1 - vp9_ctx->active_segment;
}

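/*
 * Legacy cores take a 16-byte-aligned stream address plus a start-bit
 * offset; newer cores take the buffer base and an explicit start offset
 * instead. Both skip the headers userspace already consumed.
 */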
static void
config_source(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params,
	      struct vb2_v4l2_buffer *vb2_src)
{
	dma_addr_t stream_base, tmp_addr;
	unsigned int headres_size;
	u32 src_len, start_bit, src_buf_len;

	headres_size = dec_params->uncompressed_header_size
		     + dec_params->compressed_header_size;

	stream_base = vb2_dma_contig_plane_dma_addr(&vb2_src->vb2_buf, 0);

	tmp_addr = stream_base + headres_size;
	if (ctx->dev->variant->legacy_regs)
		hantro_write_addr(ctx->dev, G2_STREAM_ADDR, (tmp_addr & ~0xf));
	else
		hantro_write_addr(ctx->dev, G2_STREAM_ADDR, stream_base);

	start_bit = (tmp_addr & 0xf) * 8;
	hantro_reg_write(ctx->dev, &g2_start_bit, start_bit);

	src_len = vb2_get_plane_payload(&vb2_src->vb2_buf, 0);
	src_len += start_bit / 8 - headres_size;
	hantro_reg_write(ctx->dev, &g2_stream_len, src_len);

	if (!ctx->dev->variant->legacy_regs) {
		tmp_addr &= ~0xf;
		hantro_reg_write(ctx->dev, &g2_strm_start_offset, tmp_addr - stream_base);
		src_buf_len = vb2_plane_size(&vb2_src->vb2_buf, 0);
		hantro_reg_write(ctx->dev, &g2_strm_buffer_len, src_buf_len);
	}
}

static void
config_registers(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params,
		 struct vb2_v4l2_buffer *vb2_src, struct vb2_v4l2_buffer *vb2_dst)
{
	struct hantro_decoded_buffer *dst, *last, *mv_ref;
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	const struct v4l2_vp9_segmentation *seg;
	bool intra_only, resolution_change;

	/* vp9 stuff */
	dst = vb2_to_hantro_decoded_buf(&vb2_dst->vb2_buf);

	if (vp9_ctx->last.valid)
		last = get_ref_buf(ctx, &dst->base.vb, vp9_ctx->last.timestamp);
	else
		last = dst;

	update_dec_buf_info(dst, dec_params);
	update_ctx_cur_info(vp9_ctx, dst, dec_params);
	seg = &dec_params->seg;

	intra_only = !!(dec_params->flags &
			(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			V4L2_VP9_FRAME_FLAG_INTRA_ONLY));

	if (!intra_only &&
	    !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
	    vp9_ctx->last.valid)
		mv_ref = last;
	else
		mv_ref = dst;

	resolution_change = dst->vp9.width != last->vp9.width ||
			    dst->vp9.height != last->vp9.height;

	/* configure basic registers */
	hantro_reg_write(ctx->dev, &g2_mode, VP9_DEC_MODE);
	if (!ctx->dev->variant->legacy_regs) {
		hantro_reg_write(ctx->dev, &g2_strm_swap, 0xf);
		hantro_reg_write(ctx->dev, &g2_dirmv_swap, 0xf);
		hantro_reg_write(ctx->dev, &g2_compress_swap, 0xf);
		hantro_reg_write(ctx->dev, &g2_ref_compress_bypass, 1);
	} else {
		hantro_reg_write(ctx->dev, &g2_strm_swap_old, 0x1f);
		hantro_reg_write(ctx->dev, &g2_pic_swap, 0x10);
		hantro_reg_write(ctx->dev, &g2_dirmv_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_tab0_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_tab1_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_tab2_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_tab3_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_rscan_swap, 0x10);
	}
	hantro_reg_write(ctx->dev, &g2_buswidth, BUS_WIDTH_128);
	hantro_reg_write(ctx->dev, &g2_max_burst, 16);
	hantro_reg_write(ctx->dev, &g2_apf_threshold, 8);
	hantro_reg_write(ctx->dev, &g2_clk_gate_e, 1);
	hantro_reg_write(ctx->dev, &g2_max_cb_size, 6);
	hantro_reg_write(ctx->dev, &g2_min_cb_size, 3);
	if (ctx->dev->variant->double_buffer)
		hantro_reg_write(ctx->dev, &g2_double_buffer_e, 1);

	config_output(ctx, dst, dec_params);

	if (!intra_only)
		config_ref_registers(ctx, dec_params, dst, mv_ref);

	config_tiles(ctx, dec_params, dst);
	config_segment(ctx, dec_params);
	config_loop_filter(ctx, dec_params);
	config_picture_dimensions(ctx, dst);
	config_bit_depth(ctx, dec_params);
	config_quant(ctx, dec_params);
	config_others(ctx, dec_params, intra_only, resolution_change);
	config_compound_reference(ctx, dec_params);
	config_probs(ctx, dec_params);
	config_counts(ctx);
	config_seg_map(ctx, dec_params, intra_only,
		       seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP);
	config_source(ctx, dec_params, vb2_src);
}

int hantro_g2_vp9_dec_run(struct hantro_ctx *ctx)
{
	const struct v4l2_ctrl_vp9_frame *decode_params;
	struct vb2_v4l2_buffer *src;
	struct vb2_v4l2_buffer *dst;
	int ret;

	hantro_g2_check_idle(ctx->dev);

	ret = start_prepare_run(ctx, &decode_params);
	if (ret) {
		hantro_end_prepare_run(ctx);
		return ret;
	}

	src = hantro_get_src_buf(ctx);
	dst = hantro_get_dst_buf(ctx);

	config_registers(ctx, decode_params, src, dst);

	hantro_end_prepare_run(ctx);

	vdpu_write(ctx->dev, G2_REG_INTERRUPT_DEC_E, G2_REG_INTERRUPT);

	return 0;
}

#define copy_tx_and_skip(p1, p2)				\
do {								\
	memcpy((p1)->tx8, (p2)->tx8, sizeof((p1)->tx8));	\
	memcpy((p1)->tx16, (p2)->tx16, sizeof((p1)->tx16));	\
	memcpy((p1)->tx32, (p2)->tx32, sizeof((p1)->tx32));	\
	memcpy((p1)->skip, (p2)->skip, sizeof((p1)->skip));	\
} while (0)

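/*
 * Post-decode hook: perform the backward probability adaptation of spec
 * 6.1.2 refresh_probs() using the symbol counts written back by the
 * hardware, then save the adapted probs into the selected frame context.
 */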
void hantro_g2_vp9_dec_done(struct hantro_ctx *ctx)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	unsigned int fctx_idx;

	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX))
		goto out_update_last;

	fctx_idx = vp9_ctx->cur.frame_context_idx;

	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE)) {
		/* error_resilient_mode == 0 && frame_parallel_decoding_mode == 0 */
		struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
		bool frame_is_intra = vp9_ctx->cur.flags &
		    (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY);
		struct tx_and_skip {
			u8 tx8[2][1];
			u8 tx16[2][2];
			u8 tx32[2][3];
			u8 skip[3];
		} _tx_skip, *tx_skip = &_tx_skip;
		struct v4l2_vp9_frame_symbol_counts *counts;
		struct symbol_counts *hantro_cnts;
		u32 tx16p[2][4];
		int i;

		/* buffer the forward-updated TX and skip probs */
		if (frame_is_intra)
			copy_tx_and_skip(tx_skip, probs);

		/* 6.1.2 refresh_probs(): load_probs() and load_probs2() */
		*probs = vp9_ctx->frame_context[fctx_idx];

		/* if FrameIsIntra then undo the effect of load_probs2() */
		if (frame_is_intra)
			copy_tx_and_skip(probs, tx_skip);

		counts = &vp9_ctx->cnts;
		hantro_cnts = vp9_ctx->misc.cpu + vp9_ctx->ctx_counters_offset;
		for (i = 0; i < ARRAY_SIZE(tx16p); ++i) {
			memcpy(tx16p[i],
			       hantro_cnts->tx16x16_count[i],
			       sizeof(hantro_cnts->tx16x16_count[0]));
			tx16p[i][3] = 0;
		}
		counts->tx16p = &tx16p;

		v4l2_vp9_adapt_coef_probs(probs, counts,
					  !vp9_ctx->last.valid ||
					  vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME,
					  frame_is_intra);

		if (!frame_is_intra) {
			/* load_probs2() already done */
			u32 mv_mode[7][4];

			for (i = 0; i < ARRAY_SIZE(mv_mode); ++i) {
				mv_mode[i][0] = hantro_cnts->inter_mode_counts[i][1][0];
				mv_mode[i][1] = hantro_cnts->inter_mode_counts[i][2][0];
				mv_mode[i][2] = hantro_cnts->inter_mode_counts[i][0][0];
				mv_mode[i][3] = hantro_cnts->inter_mode_counts[i][2][1];
			}
			counts->mv_mode = &mv_mode;
			v4l2_vp9_adapt_noncoef_probs(&vp9_ctx->probability_tables, counts,
						     vp9_ctx->cur.reference_mode,
						     vp9_ctx->cur.interpolation_filter,
						     vp9_ctx->cur.tx_mode, vp9_ctx->cur.flags);
		}
	}

	vp9_ctx->frame_context[fctx_idx] = vp9_ctx->probability_tables;

out_update_last:
	vp9_ctx->last = vp9_ctx->cur;
}