xref: /linux/drivers/media/platform/rockchip/rkvdec/rkvdec-vp9.c (revision 8d2b0853add1d7534dc0794e3c8e0b9e8c4ec640)
// SPDX-License-Identifier: GPL-2.0
/*
 * Rockchip Video Decoder VP9 backend
 *
 * Copyright (C) 2019 Collabora, Ltd.
 *	Boris Brezillon <boris.brezillon@collabora.com>
 * Copyright (C) 2021 Collabora, Ltd.
 *	Andrzej Pietrasiewicz <andrzej.p@collabora.com>
 *
 * Copyright (C) 2016 Rockchip Electronics Co., Ltd.
 *	Alpha Lin <Alpha.Lin@rock-chips.com>
 */

/*
 * To follow the VP9 spec, start reading this driver code from
 * rkvdec_vp9_run(), followed by rkvdec_vp9_done().
 */

#include <linux/kernel.h>
#include <linux/vmalloc.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-vp9.h>

#include "rkvdec.h"
#include "rkvdec-regs.h"

#define RKVDEC_VP9_PROBE_SIZE		4864
#define RKVDEC_VP9_COUNT_SIZE		13232
#define RKVDEC_VP9_MAX_SEGMAP_SIZE	73728

struct rkvdec_vp9_intra_mode_probs {
	u8 y_mode[105];
	u8 uv_mode[23];
};

struct rkvdec_vp9_intra_only_frame_probs {
	u8 coef_intra[4][2][128];
	struct rkvdec_vp9_intra_mode_probs intra_mode[10];
};

struct rkvdec_vp9_inter_frame_probs {
	u8 y_mode[4][9];
	u8 comp_mode[5];
	u8 comp_ref[5];
	u8 single_ref[5][2];
	u8 inter_mode[7][3];
	u8 interp_filter[4][2];
	u8 padding0[11];
	u8 coef[2][4][2][128];
	u8 uv_mode_0_2[3][9];
	u8 padding1[5];
	u8 uv_mode_3_5[3][9];
	u8 padding2[5];
	u8 uv_mode_6_8[3][9];
	u8 padding3[5];
	u8 uv_mode_9[9];
	u8 padding4[7];
	u8 padding5[16];
	struct {
		u8 joint[3];
		u8 sign[2];
		u8 classes[2][10];
		u8 class0_bit[2];
		u8 bits[2][10];
		u8 class0_fr[2][2][3];
		u8 fr[2][3];
		u8 class0_hp[2];
		u8 hp[2];
	} mv;
};

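/*
 * Probability table layout consumed by the hardware: a shared header
 * (partition/pred/tree/skip/tx/is_inter probs) followed by either the
 * intra-only or the inter frame specific probabilities.
 */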
struct rkvdec_vp9_probs {
	u8 partition[16][3];
	u8 pred[3];
	u8 tree[7];
	u8 skip[3];
	u8 tx32[2][3];
	u8 tx16[2][2];
	u8 tx8[2][1];
	u8 is_inter[4];
	/* 128 bit alignment */
	u8 padding0[3];
	union {
		struct rkvdec_vp9_inter_frame_probs inter;
		struct rkvdec_vp9_intra_only_frame_probs intra_only;
	};
	/* 128 bit alignment */
	u8 padding1[11];
};

/* Data structure describing auxiliary buffer format. */
struct rkvdec_vp9_priv_tbl {
	struct rkvdec_vp9_probs probs;
	u8 segmap[2][RKVDEC_VP9_MAX_SEGMAP_SIZE];
};

struct rkvdec_vp9_refs_counts {
	u32 eob[2];
	u32 coeff[3];
};

struct rkvdec_vp9_inter_frame_symbol_counts {
	u32 partition[16][4];
	u32 skip[3][2];
	u32 inter[4][2];
	u32 tx32p[2][4];
	u32 tx16p[2][4];
	u32 tx8p[2][2];
	u32 y_mode[4][10];
	u32 uv_mode[10][10];
	u32 comp[5][2];
	u32 comp_ref[5][2];
	u32 single_ref[5][2][2];
	u32 mv_mode[7][4];
	u32 filter[4][3];
	u32 mv_joint[4];
	u32 sign[2][2];
	/* add 1 element for alignment */
	u32 classes[2][11 + 1];
	u32 class0[2][2];
	u32 bits[2][10][2];
	u32 class0_fp[2][2][4];
	u32 fp[2][4];
	u32 class0_hp[2][2];
	u32 hp[2][2];
	struct rkvdec_vp9_refs_counts ref_cnt[2][4][2][6][6];
};

struct rkvdec_vp9_intra_frame_symbol_counts {
	u32 partition[4][4][4];
	u32 skip[3][2];
	u32 intra[4][2];
	u32 tx32p[2][4];
	u32 tx16p[2][4];
	u32 tx8p[2][2];
	struct rkvdec_vp9_refs_counts ref_cnt[2][4][2][6][6];
};

struct rkvdec_vp9_run {
	struct rkvdec_run base;
	const struct v4l2_ctrl_vp9_frame *decode_params;
};

struct rkvdec_vp9_frame_info {
	u32 valid : 1;
	u32 segmapid : 1;
	u32 frame_context_idx : 2;
	u32 reference_mode : 2;
	u32 tx_mode : 3;
	u32 interpolation_filter : 3;
	u32 flags;
	u64 timestamp;
	struct v4l2_vp9_segmentation seg;
	struct v4l2_vp9_loop_filter lf;
};

struct rkvdec_vp9_ctx {
	struct rkvdec_aux_buf priv_tbl;
	struct rkvdec_aux_buf count_tbl;
	struct v4l2_vp9_frame_symbol_counts inter_cnts;
	struct v4l2_vp9_frame_symbol_counts intra_cnts;
	struct v4l2_vp9_frame_context probability_tables;
	struct v4l2_vp9_frame_context frame_context[4];
	struct rkvdec_vp9_frame_info cur;
	struct rkvdec_vp9_frame_info last;
};

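/*
 * Pack one 6x6x3 coefficient probability plane into the hardware layout:
 * every group of 27 probability bytes is padded to 32 bytes (2 x 128 bits)
 * by skipping 5 bytes.
 */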
static void write_coeff_plane(const u8 coef[6][6][3], u8 *coeff_plane)
{
	unsigned int idx = 0, byte_count = 0;
	int k, m, n;
	u8 p;

	for (k = 0; k < 6; k++) {
		for (m = 0; m < 6; m++) {
			for (n = 0; n < 3; n++) {
				p = coef[k][m][n];
				coeff_plane[idx++] = p;
				byte_count++;
				if (byte_count == 27) {
					idx += 5;
					byte_count = 0;
				}
			}
		}
	}
}

static void init_intra_only_probs(struct rkvdec_ctx *ctx,
				  const struct rkvdec_vp9_run *run)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
	struct rkvdec_vp9_intra_only_frame_probs *rkprobs;
	const struct v4l2_vp9_frame_context *probs;
	unsigned int i, j, k;

	rkprobs = &tbl->probs.intra_only;
	probs = &vp9_ctx->probability_tables;

	/*
	 * Intra only: 149 x 128 bits, aligned to 152 x 128 bits.
	 * Coeff related probs: 64 x 128 bits.
	 */
	for (i = 0; i < ARRAY_SIZE(probs->coef); i++) {
		for (j = 0; j < ARRAY_SIZE(probs->coef[0]); j++)
			write_coeff_plane(probs->coef[i][j][0],
					  rkprobs->coef_intra[i][j]);
	}

	/* intra mode prob  80 x 128 bits */
	for (i = 0; i < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob); i++) {
		unsigned int byte_count = 0;
		int idx = 0;

		/* vp9_kf_y_mode_prob */
		for (j = 0; j < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob[0]); j++) {
			for (k = 0; k < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob[0][0]);
			     k++) {
				u8 val = v4l2_vp9_kf_y_mode_prob[i][j][k];

				rkprobs->intra_mode[i].y_mode[idx++] = val;
				byte_count++;
				if (byte_count == 27) {
					byte_count = 0;
					idx += 5;
				}
			}
		}
	}

	for (i = 0; i < sizeof(v4l2_vp9_kf_uv_mode_prob); ++i) {
		const u8 *ptr = (const u8 *)v4l2_vp9_kf_uv_mode_prob;

		rkprobs->intra_mode[i / 23].uv_mode[i % 23] = ptr[i];
	}
}

static void init_inter_probs(struct rkvdec_ctx *ctx,
			     const struct rkvdec_vp9_run *run)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
	struct rkvdec_vp9_inter_frame_probs *rkprobs;
	const struct v4l2_vp9_frame_context *probs;
	unsigned int i, j, k;

	rkprobs = &tbl->probs.inter;
	probs = &vp9_ctx->probability_tables;

	/*
	 * inter probs
	 * 151 x 128 bits, aligned to 152 x 128 bits
	 * inter only
	 * intra_y_mode & inter_block info 6 x 128 bits
	 */

	memcpy(rkprobs->y_mode, probs->y_mode, sizeof(rkprobs->y_mode));
	memcpy(rkprobs->comp_mode, probs->comp_mode,
	       sizeof(rkprobs->comp_mode));
	memcpy(rkprobs->comp_ref, probs->comp_ref,
	       sizeof(rkprobs->comp_ref));
	memcpy(rkprobs->single_ref, probs->single_ref,
	       sizeof(rkprobs->single_ref));
	memcpy(rkprobs->inter_mode, probs->inter_mode,
	       sizeof(rkprobs->inter_mode));
	memcpy(rkprobs->interp_filter, probs->interp_filter,
	       sizeof(rkprobs->interp_filter));

	/* 128 x 128 bits coeff related */
	for (i = 0; i < ARRAY_SIZE(probs->coef); i++) {
		for (j = 0; j < ARRAY_SIZE(probs->coef[0]); j++) {
			for (k = 0; k < ARRAY_SIZE(probs->coef[0][0]); k++)
				write_coeff_plane(probs->coef[i][j][k],
						  rkprobs->coef[k][i][j]);
		}
	}

	/* intra uv mode 6 x 128 */
	memcpy(rkprobs->uv_mode_0_2, &probs->uv_mode[0],
	       sizeof(rkprobs->uv_mode_0_2));
	memcpy(rkprobs->uv_mode_3_5, &probs->uv_mode[3],
	       sizeof(rkprobs->uv_mode_3_5));
	memcpy(rkprobs->uv_mode_6_8, &probs->uv_mode[6],
	       sizeof(rkprobs->uv_mode_6_8));
	memcpy(rkprobs->uv_mode_9, &probs->uv_mode[9],
	       sizeof(rkprobs->uv_mode_9));

	/* mv related 6 x 128 */
	memcpy(rkprobs->mv.joint, probs->mv.joint,
	       sizeof(rkprobs->mv.joint));
	memcpy(rkprobs->mv.sign, probs->mv.sign,
	       sizeof(rkprobs->mv.sign));
	memcpy(rkprobs->mv.classes, probs->mv.classes,
	       sizeof(rkprobs->mv.classes));
	memcpy(rkprobs->mv.class0_bit, probs->mv.class0_bit,
	       sizeof(rkprobs->mv.class0_bit));
	memcpy(rkprobs->mv.bits, probs->mv.bits,
	       sizeof(rkprobs->mv.bits));
	memcpy(rkprobs->mv.class0_fr, probs->mv.class0_fr,
	       sizeof(rkprobs->mv.class0_fr));
	memcpy(rkprobs->mv.fr, probs->mv.fr,
	       sizeof(rkprobs->mv.fr));
	memcpy(rkprobs->mv.class0_hp, probs->mv.class0_hp,
	       sizeof(rkprobs->mv.class0_hp));
	memcpy(rkprobs->mv.hp, probs->mv.hp,
	       sizeof(rkprobs->mv.hp));
}

static void init_probs(struct rkvdec_ctx *ctx,
		       const struct rkvdec_vp9_run *run)
{
	const struct v4l2_ctrl_vp9_frame *dec_params;
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
	struct rkvdec_vp9_probs *rkprobs = &tbl->probs;
	const struct v4l2_vp9_segmentation *seg;
	const struct v4l2_vp9_frame_context *probs;
	bool intra_only;

	dec_params = run->decode_params;
	probs = &vp9_ctx->probability_tables;
	seg = &dec_params->seg;

	memset(rkprobs, 0, sizeof(*rkprobs));

	intra_only = !!(dec_params->flags &
			(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			 V4L2_VP9_FRAME_FLAG_INTRA_ONLY));

	/* sb info  5 x 128 bit */
	memcpy(rkprobs->partition,
	       intra_only ? v4l2_vp9_kf_partition_probs : probs->partition,
	       sizeof(rkprobs->partition));

	memcpy(rkprobs->pred, seg->pred_probs, sizeof(rkprobs->pred));
	memcpy(rkprobs->tree, seg->tree_probs, sizeof(rkprobs->tree));
	memcpy(rkprobs->skip, probs->skip, sizeof(rkprobs->skip));
	memcpy(rkprobs->tx32, probs->tx32, sizeof(rkprobs->tx32));
	memcpy(rkprobs->tx16, probs->tx16, sizeof(rkprobs->tx16));
	memcpy(rkprobs->tx8, probs->tx8, sizeof(rkprobs->tx8));
	memcpy(rkprobs->is_inter, probs->is_inter, sizeof(rkprobs->is_inter));

	if (intra_only)
		init_intra_only_probs(ctx, run);
	else
		init_inter_probs(ctx, run);
}

struct rkvdec_vp9_ref_reg {
	u32 reg_frm_size;
	u32 reg_hor_stride;
	u32 reg_y_stride;
	u32 reg_yuv_stride;
	u32 reg_ref_base;
};

static struct rkvdec_vp9_ref_reg ref_regs[] = {
	{
		.reg_frm_size = RKVDEC_REG_VP9_FRAME_SIZE(0),
		.reg_hor_stride = RKVDEC_VP9_HOR_VIRSTRIDE(0),
		.reg_y_stride = RKVDEC_VP9_LAST_FRAME_YSTRIDE,
		.reg_yuv_stride = RKVDEC_VP9_LAST_FRAME_YUVSTRIDE,
		.reg_ref_base = RKVDEC_REG_VP9_LAST_FRAME_BASE,
	},
	{
		.reg_frm_size = RKVDEC_REG_VP9_FRAME_SIZE(1),
		.reg_hor_stride = RKVDEC_VP9_HOR_VIRSTRIDE(1),
		.reg_y_stride = RKVDEC_VP9_GOLDEN_FRAME_YSTRIDE,
		.reg_yuv_stride = 0,
		.reg_ref_base = RKVDEC_REG_VP9_GOLDEN_FRAME_BASE,
	},
	{
		.reg_frm_size = RKVDEC_REG_VP9_FRAME_SIZE(2),
		.reg_hor_stride = RKVDEC_VP9_HOR_VIRSTRIDE(2),
		.reg_y_stride = RKVDEC_VP9_ALTREF_FRAME_YSTRIDE,
		.reg_yuv_stride = 0,
		.reg_ref_base = RKVDEC_REG_VP9_ALTREF_FRAME_BASE,
	}
};

static struct rkvdec_decoded_buffer *
get_ref_buf(struct rkvdec_ctx *ctx, struct vb2_v4l2_buffer *dst, u64 timestamp)
{
	struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;
	struct vb2_queue *cap_q = &m2m_ctx->cap_q_ctx.q;
	struct vb2_buffer *buf;

	/*
	 * If a ref is unused or invalid, the address of the current
	 * destination buffer is returned.
	 */
	buf = vb2_find_buffer(cap_q, timestamp);
	if (!buf)
		buf = &dst->vb2_buf;

	return vb2_to_rkvdec_decoded_buf(buf);
}

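/*
 * The co-located motion vector buffer is laid out right after the decoded
 * YUV data in the capture buffer.
 */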
static dma_addr_t get_mv_base_addr(struct rkvdec_decoded_buffer *buf)
{
	unsigned int aligned_pitch, aligned_height, yuv_len;

	aligned_height = round_up(buf->vp9.height, 64);
	aligned_pitch = round_up(buf->vp9.width * buf->vp9.bit_depth, 512) / 8;
	yuv_len = (aligned_height * aligned_pitch * 3) / 2;

	return vb2_dma_contig_plane_dma_addr(&buf->base.vb.vb2_buf, 0) +
	       yuv_len;
}

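/*
 * Program the frame size, strides and base address of one reference frame.
 * Stride registers are left untouched when the reference resolves to the
 * destination buffer itself (unused or invalid reference).
 */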
static void config_ref_registers(struct rkvdec_ctx *ctx,
				 const struct rkvdec_vp9_run *run,
				 struct rkvdec_decoded_buffer *ref_buf,
				 struct rkvdec_vp9_ref_reg *ref_reg)
{
	unsigned int aligned_pitch, aligned_height, y_len, yuv_len;
	struct rkvdec_dev *rkvdec = ctx->dev;

	aligned_height = round_up(ref_buf->vp9.height, 64);
	writel_relaxed(RKVDEC_VP9_FRAMEWIDTH(ref_buf->vp9.width) |
		       RKVDEC_VP9_FRAMEHEIGHT(ref_buf->vp9.height),
		       rkvdec->regs + ref_reg->reg_frm_size);

	writel_relaxed(vb2_dma_contig_plane_dma_addr(&ref_buf->base.vb.vb2_buf, 0),
		       rkvdec->regs + ref_reg->reg_ref_base);

	if (&ref_buf->base.vb == run->base.bufs.dst)
		return;

	aligned_pitch = round_up(ref_buf->vp9.width * ref_buf->vp9.bit_depth, 512) / 8;
	y_len = aligned_height * aligned_pitch;
	yuv_len = (y_len * 3) / 2;

	writel_relaxed(RKVDEC_HOR_Y_VIRSTRIDE(aligned_pitch / 16) |
		       RKVDEC_HOR_UV_VIRSTRIDE(aligned_pitch / 16),
		       rkvdec->regs + ref_reg->reg_hor_stride);
	writel_relaxed(RKVDEC_VP9_REF_YSTRIDE(y_len / 16),
		       rkvdec->regs + ref_reg->reg_y_stride);

	if (!ref_reg->reg_yuv_stride)
		return;

	writel_relaxed(RKVDEC_VP9_REF_YUVSTRIDE(yuv_len / 16),
		       rkvdec->regs + ref_reg->reg_yuv_stride);
}

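/*
 * Program the per-segment feature register for segid, using the segmentation
 * parameters of the last valid frame when available and falling back to the
 * current frame's parameters otherwise.
 */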
static void config_seg_registers(struct rkvdec_ctx *ctx, unsigned int segid)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	const struct v4l2_vp9_segmentation *seg;
	struct rkvdec_dev *rkvdec = ctx->dev;
	s16 feature_val;
	int feature_id;
	u32 val = 0;

	seg = vp9_ctx->last.valid ? &vp9_ctx->last.seg : &vp9_ctx->cur.seg;
	feature_id = V4L2_VP9_SEG_LVL_ALT_Q;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
		feature_val = seg->feature_data[segid][feature_id];
		val |= RKVDEC_SEGID_FRAME_QP_DELTA_EN(1) |
		       RKVDEC_SEGID_FRAME_QP_DELTA(feature_val);
	}

	feature_id = V4L2_VP9_SEG_LVL_ALT_L;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
		feature_val = seg->feature_data[segid][feature_id];
		val |= RKVDEC_SEGID_FRAME_LOOPFILTER_VALUE_EN(1) |
		       RKVDEC_SEGID_FRAME_LOOPFILTER_VALUE(feature_val);
	}

	feature_id = V4L2_VP9_SEG_LVL_REF_FRAME;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
		feature_val = seg->feature_data[segid][feature_id];
		val |= RKVDEC_SEGID_REFERINFO_EN(1) |
		       RKVDEC_SEGID_REFERINFO(feature_val);
	}

	feature_id = V4L2_VP9_SEG_LVL_SKIP;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid))
		val |= RKVDEC_SEGID_FRAME_SKIP_EN(1);

	if (!segid &&
	    (seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE))
		val |= RKVDEC_SEGID_ABS_DELTA(1);

	writel_relaxed(val, rkvdec->regs + RKVDEC_VP9_SEGID_GRP(segid));
}

static void update_dec_buf_info(struct rkvdec_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	buf->vp9.width = dec_params->frame_width_minus_1 + 1;
	buf->vp9.height = dec_params->frame_height_minus_1 + 1;
	buf->vp9.bit_depth = dec_params->bit_depth;
}

static void update_ctx_cur_info(struct rkvdec_vp9_ctx *vp9_ctx,
				struct rkvdec_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	vp9_ctx->cur.valid = true;
	vp9_ctx->cur.reference_mode = dec_params->reference_mode;
	vp9_ctx->cur.interpolation_filter = dec_params->interpolation_filter;
	vp9_ctx->cur.flags = dec_params->flags;
	vp9_ctx->cur.timestamp = buf->base.vb.vb2_buf.timestamp;
	vp9_ctx->cur.seg = dec_params->seg;
	vp9_ctx->cur.lf = dec_params->lf;
}

static void update_ctx_last_info(struct rkvdec_vp9_ctx *vp9_ctx)
{
	vp9_ctx->last = vp9_ctx->cur;
}

static void config_registers(struct rkvdec_ctx *ctx,
			     const struct rkvdec_vp9_run *run)
{
	unsigned int y_len, uv_len, yuv_len, bit_depth, aligned_height, aligned_pitch, stream_len;
	const struct v4l2_ctrl_vp9_frame *dec_params;
	struct rkvdec_decoded_buffer *ref_bufs[3];
	struct rkvdec_decoded_buffer *dst, *last, *mv_ref;
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	u32 val, last_frame_info = 0;
	const struct v4l2_vp9_segmentation *seg;
	struct rkvdec_dev *rkvdec = ctx->dev;
	dma_addr_t addr;
	bool intra_only;
	unsigned int i;

	dec_params = run->decode_params;
	dst = vb2_to_rkvdec_decoded_buf(&run->base.bufs.dst->vb2_buf);
	ref_bufs[0] = get_ref_buf(ctx, &dst->base.vb, dec_params->last_frame_ts);
	ref_bufs[1] = get_ref_buf(ctx, &dst->base.vb, dec_params->golden_frame_ts);
	ref_bufs[2] = get_ref_buf(ctx, &dst->base.vb, dec_params->alt_frame_ts);

	if (vp9_ctx->last.valid)
		last = get_ref_buf(ctx, &dst->base.vb, vp9_ctx->last.timestamp);
	else
		last = dst;

	update_dec_buf_info(dst, dec_params);
	update_ctx_cur_info(vp9_ctx, dst, dec_params);
	seg = &dec_params->seg;

	intra_only = !!(dec_params->flags &
			(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			 V4L2_VP9_FRAME_FLAG_INTRA_ONLY));

	writel_relaxed(RKVDEC_MODE(RKVDEC_MODE_VP9),
		       rkvdec->regs + RKVDEC_REG_SYSCTRL);

	bit_depth = dec_params->bit_depth;
	aligned_height = round_up(ctx->decoded_fmt.fmt.pix_mp.height, 64);

	aligned_pitch = round_up(ctx->decoded_fmt.fmt.pix_mp.width *
				 bit_depth,
				 512) / 8;
	y_len = aligned_height * aligned_pitch;
	uv_len = y_len / 2;
	yuv_len = y_len + uv_len;

	writel_relaxed(RKVDEC_Y_HOR_VIRSTRIDE(aligned_pitch / 16) |
		       RKVDEC_UV_HOR_VIRSTRIDE(aligned_pitch / 16),
		       rkvdec->regs + RKVDEC_REG_PICPAR);
	writel_relaxed(RKVDEC_Y_VIRSTRIDE(y_len / 16),
		       rkvdec->regs + RKVDEC_REG_Y_VIRSTRIDE);
	writel_relaxed(RKVDEC_YUV_VIRSTRIDE(yuv_len / 16),
		       rkvdec->regs + RKVDEC_REG_YUV_VIRSTRIDE);

	stream_len = vb2_get_plane_payload(&run->base.bufs.src->vb2_buf, 0);
	writel_relaxed(RKVDEC_STRM_LEN(stream_len),
		       rkvdec->regs + RKVDEC_REG_STRM_LEN);

	/*
	 * Reset the count buffer: the decoder only outputs intra-related
	 * syntax counts when decoding an intra frame, but the entropy update
	 * needs to update all the probabilities.
	 */
	if (intra_only)
		memset(vp9_ctx->count_tbl.cpu, 0, vp9_ctx->count_tbl.size);

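	/*
	 * segmapid is a single-bit index selecting one of the two segmentation
	 * maps stored in the aux buffer; toggle it whenever the hardware is
	 * expected to write an updated map, so the map of the previous frame
	 * stays available in the other slot.
	 */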
	vp9_ctx->cur.segmapid = vp9_ctx->last.segmapid;
	if (!intra_only &&
	    !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
	    (!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED) ||
	     (seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP)))
		vp9_ctx->cur.segmapid++;

	for (i = 0; i < ARRAY_SIZE(ref_bufs); i++)
		config_ref_registers(ctx, run, ref_bufs[i], &ref_regs[i]);

	for (i = 0; i < 8; i++)
		config_seg_registers(ctx, i);

	writel_relaxed(RKVDEC_VP9_TX_MODE(vp9_ctx->cur.tx_mode) |
		       RKVDEC_VP9_FRAME_REF_MODE(dec_params->reference_mode),
		       rkvdec->regs + RKVDEC_VP9_CPRHEADER_CONFIG);

	if (!intra_only) {
		const struct v4l2_vp9_loop_filter *lf;
		s8 delta;

		if (vp9_ctx->last.valid)
			lf = &vp9_ctx->last.lf;
		else
			lf = &vp9_ctx->cur.lf;

		val = 0;
		for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++) {
			delta = lf->ref_deltas[i];
			val |= RKVDEC_REF_DELTAS_LASTFRAME(i, delta);
		}

		writel_relaxed(val,
			       rkvdec->regs + RKVDEC_VP9_REF_DELTAS_LASTFRAME);

		for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++) {
			delta = lf->mode_deltas[i];
			last_frame_info |= RKVDEC_MODE_DELTAS_LASTFRAME(i,
									delta);
		}
	}

	if (vp9_ctx->last.valid && !intra_only &&
	    vp9_ctx->last.seg.flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED)
		last_frame_info |= RKVDEC_SEG_EN_LASTFRAME;

	if (vp9_ctx->last.valid &&
	    vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_SHOW_FRAME)
		last_frame_info |= RKVDEC_LAST_SHOW_FRAME;

	if (vp9_ctx->last.valid &&
	    vp9_ctx->last.flags &
	    (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY))
		last_frame_info |= RKVDEC_LAST_INTRA_ONLY;

	if (vp9_ctx->last.valid &&
	    last->vp9.width == dst->vp9.width &&
	    last->vp9.height == dst->vp9.height)
		last_frame_info |= RKVDEC_LAST_WIDHHEIGHT_EQCUR;

	writel_relaxed(last_frame_info,
		       rkvdec->regs + RKVDEC_VP9_INFO_LASTFRAME);

	writel_relaxed(stream_len - dec_params->compressed_header_size -
		       dec_params->uncompressed_header_size,
		       rkvdec->regs + RKVDEC_VP9_LASTTILE_SIZE);

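	/* Reference scaling factors: (ref dimension << 14) / destination dimension. */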
	for (i = 0; !intra_only && i < ARRAY_SIZE(ref_bufs); i++) {
		unsigned int refw = ref_bufs[i]->vp9.width;
		unsigned int refh = ref_bufs[i]->vp9.height;
		u32 hscale, vscale;

		hscale = (refw << 14) / dst->vp9.width;
		vscale = (refh << 14) / dst->vp9.height;
		writel_relaxed(RKVDEC_VP9_REF_HOR_SCALE(hscale) |
			       RKVDEC_VP9_REF_VER_SCALE(vscale),
			       rkvdec->regs + RKVDEC_VP9_REF_SCALE(i));
	}

	addr = vb2_dma_contig_plane_dma_addr(&dst->base.vb.vb2_buf, 0);
	writel_relaxed(addr, rkvdec->regs + RKVDEC_REG_DECOUT_BASE);
	addr = vb2_dma_contig_plane_dma_addr(&run->base.bufs.src->vb2_buf, 0);
	writel_relaxed(addr, rkvdec->regs + RKVDEC_REG_STRM_RLC_BASE);
	writel_relaxed(vp9_ctx->priv_tbl.dma +
		       offsetof(struct rkvdec_vp9_priv_tbl, probs),
		       rkvdec->regs + RKVDEC_REG_CABACTBL_PROB_BASE);
	writel_relaxed(vp9_ctx->count_tbl.dma,
		       rkvdec->regs + RKVDEC_REG_VP9COUNT_BASE);

	writel_relaxed(vp9_ctx->priv_tbl.dma +
		       offsetof(struct rkvdec_vp9_priv_tbl, segmap) +
		       (RKVDEC_VP9_MAX_SEGMAP_SIZE * vp9_ctx->cur.segmapid),
		       rkvdec->regs + RKVDEC_REG_VP9_SEGIDCUR_BASE);
	writel_relaxed(vp9_ctx->priv_tbl.dma +
		       offsetof(struct rkvdec_vp9_priv_tbl, segmap) +
		       (RKVDEC_VP9_MAX_SEGMAP_SIZE * (!vp9_ctx->cur.segmapid)),
		       rkvdec->regs + RKVDEC_REG_VP9_SEGIDLAST_BASE);

	if (!intra_only &&
	    !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
	    vp9_ctx->last.valid)
		mv_ref = last;
	else
		mv_ref = dst;

	writel_relaxed(get_mv_base_addr(mv_ref),
		       rkvdec->regs + RKVDEC_VP9_REF_COLMV_BASE);

	writel_relaxed(ctx->decoded_fmt.fmt.pix_mp.width |
		       (ctx->decoded_fmt.fmt.pix_mp.height << 16),
		       rkvdec->regs + RKVDEC_REG_PERFORMANCE_CYCLE);
}

static int validate_dec_params(struct rkvdec_ctx *ctx,
			       const struct v4l2_ctrl_vp9_frame *dec_params)
{
	unsigned int aligned_width, aligned_height;

	/* We only support profile 0. */
	if (dec_params->profile != 0) {
		dev_err(ctx->dev->dev, "unsupported profile %d\n",
			dec_params->profile);
		return -EINVAL;
	}

	aligned_width = round_up(dec_params->frame_width_minus_1 + 1, 64);
	aligned_height = round_up(dec_params->frame_height_minus_1 + 1, 64);

	/*
	 * Userspace should update the capture/decoded format when the
	 * resolution changes.
	 */
	if (aligned_width != ctx->decoded_fmt.fmt.pix_mp.width ||
	    aligned_height != ctx->decoded_fmt.fmt.pix_mp.height) {
		dev_err(ctx->dev->dev,
			"unexpected bitstream resolution %dx%d\n",
			dec_params->frame_width_minus_1 + 1,
			dec_params->frame_height_minus_1 + 1);
		return -EINVAL;
	}

	return 0;
}

static int rkvdec_vp9_run_preamble(struct rkvdec_ctx *ctx,
				   struct rkvdec_vp9_run *run)
{
	const struct v4l2_ctrl_vp9_frame *dec_params;
	const struct v4l2_ctrl_vp9_compressed_hdr *prob_updates;
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct v4l2_ctrl *ctrl;
	unsigned int fctx_idx;
	int ret;

	/* v4l2-specific stuff */
	rkvdec_run_preamble(ctx, &run->base);

	ctrl = v4l2_ctrl_find(&ctx->ctrl_hdl,
			      V4L2_CID_STATELESS_VP9_FRAME);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	dec_params = ctrl->p_cur.p;

	ret = validate_dec_params(ctx, dec_params);
	if (ret)
		return ret;

	run->decode_params = dec_params;

	ctrl = v4l2_ctrl_find(&ctx->ctrl_hdl, V4L2_CID_STATELESS_VP9_COMPRESSED_HDR);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	prob_updates = ctrl->p_cur.p;
	vp9_ctx->cur.tx_mode = prob_updates->tx_mode;

	/*
	 * vp9 stuff
	 *
	 * by this point the userspace has done all parts of 6.2 uncompressed_header()
	 * except this fragment:
	 * if ( FrameIsIntra || error_resilient_mode ) {
	 *	setup_past_independence ( )
	 *	if ( frame_type == KEY_FRAME || error_resilient_mode == 1 ||
	 *	     reset_frame_context == 3 ) {
	 *		for ( i = 0; i < 4; i ++ ) {
	 *			save_probs( i )
	 *		}
	 *	} else if ( reset_frame_context == 2 ) {
	 *		save_probs( frame_context_idx )
	 *	}
	 *	frame_context_idx = 0
	 * }
	 */
	fctx_idx = v4l2_vp9_reset_frame_ctx(dec_params, vp9_ctx->frame_context);
	vp9_ctx->cur.frame_context_idx = fctx_idx;

	/* 6.1 frame(sz): load_probs() and load_probs2() */
	vp9_ctx->probability_tables = vp9_ctx->frame_context[fctx_idx];

	/*
	 * The userspace has also performed 6.3 compressed_header(), but handling the
	 * probs in a special way. All probs which need updating, except MV-related,
	 * have been read from the bitstream and translated through inv_map_table[],
	 * but no 6.3.6 inv_recenter_nonneg(v, m) has been performed. The values passed
	 * by userspace are either translated values (there are no 0 values in
	 * inv_map_table[]), or zero to indicate no update. All MV-related probs which need
	 * updating have been read from the bitstream and (mv_prob << 1) | 1 has been
	 * performed. The values passed by userspace are either new values
	 * to replace old ones (the above mentioned shift and bitwise or never result in
	 * a zero) or zero to indicate no update.
	 * fw_update_probs() performs actual probs updates or leaves probs as-is
	 * for values for which a zero was passed from userspace.
	 */
	v4l2_vp9_fw_update_probs(&vp9_ctx->probability_tables, prob_updates, dec_params);

	return 0;
}

static int rkvdec_vp9_run(struct rkvdec_ctx *ctx)
{
	struct rkvdec_dev *rkvdec = ctx->dev;
	struct rkvdec_vp9_run run = { };
	int ret;

	ret = rkvdec_vp9_run_preamble(ctx, &run);
	if (ret) {
		rkvdec_run_postamble(ctx, &run.base);
		return ret;
	}

	/* Prepare probs. */
	init_probs(ctx, &run);

	/* Configure hardware registers. */
	config_registers(ctx, &run);

	rkvdec_run_postamble(ctx, &run.base);

	schedule_delayed_work(&rkvdec->watchdog_work, msecs_to_jiffies(2000));

	writel(1, rkvdec->regs + RKVDEC_REG_PREF_LUMA_CACHE_COMMAND);
	writel(1, rkvdec->regs + RKVDEC_REG_PREF_CHR_CACHE_COMMAND);

	writel(0xe, rkvdec->regs + RKVDEC_REG_STRMD_ERR_EN);
	/* Start decoding! */
	writel(RKVDEC_INTERRUPT_DEC_E | RKVDEC_CONFIG_DEC_CLK_GATE_E |
	       RKVDEC_TIMEOUT_E | RKVDEC_BUF_EMPTY_E,
	       rkvdec->regs + RKVDEC_REG_INTERRUPT);

	return 0;
}

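/*
 * Copy the tx8/tx16/tx32 and skip probability sub-tables between two
 * frame-context-like structures.
 */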
#define copy_tx_and_skip(p1, p2)				\
do {								\
	memcpy((p1)->tx8, (p2)->tx8, sizeof((p1)->tx8));	\
	memcpy((p1)->tx16, (p2)->tx16, sizeof((p1)->tx16));	\
	memcpy((p1)->tx32, (p2)->tx32, sizeof((p1)->tx32));	\
	memcpy((p1)->skip, (p2)->skip, sizeof((p1)->skip));	\
} while (0)

static void rkvdec_vp9_done(struct rkvdec_ctx *ctx,
			    struct vb2_v4l2_buffer *src_buf,
			    struct vb2_v4l2_buffer *dst_buf,
			    enum vb2_buffer_state result)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	unsigned int fctx_idx;

	/* v4l2-specific stuff */
	if (result == VB2_BUF_STATE_ERROR)
		goto out_update_last;

	/*
	 * vp9 stuff
	 *
	 * 6.1.2 refresh_probs()
	 *
	 * In the spec a complementary condition goes last in 6.1.2 refresh_probs(),
	 * but it makes no sense to perform all the activities from the first "if"
	 * there if we actually are not refreshing the frame context. On top of that,
	 * because of 6.2 uncompressed_header() whenever error_resilient_mode == 1,
	 * refresh_frame_context == 0. Consequently, if we don't jump to out_update_last
	 * it means error_resilient_mode must be 0.
	 */
	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX))
		goto out_update_last;

	fctx_idx = vp9_ctx->cur.frame_context_idx;

	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE)) {
		/* error_resilient_mode == 0 && frame_parallel_decoding_mode == 0 */
		struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
		bool frame_is_intra = vp9_ctx->cur.flags &
		    (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY);
		struct tx_and_skip {
			u8 tx8[2][1];
			u8 tx16[2][2];
			u8 tx32[2][3];
			u8 skip[3];
		} _tx_skip, *tx_skip = &_tx_skip;
		struct v4l2_vp9_frame_symbol_counts *counts;

		/* buffer the forward-updated TX and skip probs */
		if (frame_is_intra)
			copy_tx_and_skip(tx_skip, probs);

		/* 6.1.2 refresh_probs(): load_probs() and load_probs2() */
		*probs = vp9_ctx->frame_context[fctx_idx];

		/* if FrameIsIntra then undo the effect of load_probs2() */
		if (frame_is_intra)
			copy_tx_and_skip(probs, tx_skip);

		counts = frame_is_intra ? &vp9_ctx->intra_cnts : &vp9_ctx->inter_cnts;
		v4l2_vp9_adapt_coef_probs(probs, counts,
					  !vp9_ctx->last.valid ||
					  vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME,
					  frame_is_intra);
		if (!frame_is_intra) {
			const struct rkvdec_vp9_inter_frame_symbol_counts *inter_cnts;
			u32 classes[2][11];
			int i;

			inter_cnts = vp9_ctx->count_tbl.cpu;
			for (i = 0; i < ARRAY_SIZE(classes); ++i)
				memcpy(classes[i], inter_cnts->classes[i], sizeof(classes[0]));
			counts->classes = &classes;

			/* load_probs2() already done */
			v4l2_vp9_adapt_noncoef_probs(&vp9_ctx->probability_tables, counts,
						     vp9_ctx->cur.reference_mode,
						     vp9_ctx->cur.interpolation_filter,
						     vp9_ctx->cur.tx_mode, vp9_ctx->cur.flags);
		}
	}

	/* 6.1.2 refresh_probs(): save_probs(fctx_idx) */
	vp9_ctx->frame_context[fctx_idx] = vp9_ctx->probability_tables;

out_update_last:
	update_ctx_last_info(vp9_ctx);
}

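/*
 * Point the v4l2-vp9 helper's symbol count structures at the hardware count
 * buffer. The intra and inter layouts alias the same DMA buffer; which one
 * is valid depends on the type of the frame that was just decoded.
 */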
static void rkvdec_init_v4l2_vp9_count_tbl(struct rkvdec_ctx *ctx)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_intra_frame_symbol_counts *intra_cnts = vp9_ctx->count_tbl.cpu;
	struct rkvdec_vp9_inter_frame_symbol_counts *inter_cnts = vp9_ctx->count_tbl.cpu;
	int i, j, k, l, m;

	vp9_ctx->inter_cnts.partition = &inter_cnts->partition;
	vp9_ctx->inter_cnts.skip = &inter_cnts->skip;
	vp9_ctx->inter_cnts.intra_inter = &inter_cnts->inter;
	vp9_ctx->inter_cnts.tx32p = &inter_cnts->tx32p;
	vp9_ctx->inter_cnts.tx16p = &inter_cnts->tx16p;
	vp9_ctx->inter_cnts.tx8p = &inter_cnts->tx8p;

	vp9_ctx->intra_cnts.partition = (u32 (*)[16][4])(&intra_cnts->partition);
	vp9_ctx->intra_cnts.skip = &intra_cnts->skip;
	vp9_ctx->intra_cnts.intra_inter = &intra_cnts->intra;
	vp9_ctx->intra_cnts.tx32p = &intra_cnts->tx32p;
	vp9_ctx->intra_cnts.tx16p = &intra_cnts->tx16p;
	vp9_ctx->intra_cnts.tx8p = &intra_cnts->tx8p;

	vp9_ctx->inter_cnts.y_mode = &inter_cnts->y_mode;
	vp9_ctx->inter_cnts.uv_mode = &inter_cnts->uv_mode;
	vp9_ctx->inter_cnts.comp = &inter_cnts->comp;
	vp9_ctx->inter_cnts.comp_ref = &inter_cnts->comp_ref;
	vp9_ctx->inter_cnts.single_ref = &inter_cnts->single_ref;
	vp9_ctx->inter_cnts.mv_mode = &inter_cnts->mv_mode;
	vp9_ctx->inter_cnts.filter = &inter_cnts->filter;
	vp9_ctx->inter_cnts.mv_joint = &inter_cnts->mv_joint;
	vp9_ctx->inter_cnts.sign = &inter_cnts->sign;
956*d968e50bSDetlev Casanova 	/*
957*d968e50bSDetlev Casanova 	 * rk hardware actually uses "u32 classes[2][11 + 1];" instead of
958*d968e50bSDetlev Casanova 	 * "u32 classes[2][11];", so these counters cannot be aliased here:
959*d968e50bSDetlev Casanova 	 * rkvdec_vp9_done() copies each row into a properly sized local array
960*d968e50bSDetlev Casanova 	 * before passing it to the vp9 library (see the sketch after this function).
961*d968e50bSDetlev Casanova 	 */
962*d968e50bSDetlev Casanova 	vp9_ctx->inter_cnts.class0 = &inter_cnts->class0;
963*d968e50bSDetlev Casanova 	vp9_ctx->inter_cnts.bits = &inter_cnts->bits;
964*d968e50bSDetlev Casanova 	vp9_ctx->inter_cnts.class0_fp = &inter_cnts->class0_fp;
965*d968e50bSDetlev Casanova 	vp9_ctx->inter_cnts.fp = &inter_cnts->fp;
966*d968e50bSDetlev Casanova 	vp9_ctx->inter_cnts.class0_hp = &inter_cnts->class0_hp;
967*d968e50bSDetlev Casanova 	vp9_ctx->inter_cnts.hp = &inter_cnts->hp;
968*d968e50bSDetlev Casanova 
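	/*
	 * The pointer wiring below maps the v4l2-vp9 coeff/eob indexing
	 * [i][j][k][l][m] onto the hardware's ref_cnt[k][i][j][l][m] layout,
	 * i.e. the third v4l2 index becomes the outermost hardware dimension,
	 * for both the inter and intra count tables.
	 */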
969*d968e50bSDetlev Casanova #define INNERMOST_LOOP \
970*d968e50bSDetlev Casanova 	do {										\
971*d968e50bSDetlev Casanova 		for (m = 0; m < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0][0][0]); ++m) {\
972*d968e50bSDetlev Casanova 			vp9_ctx->inter_cnts.coeff[i][j][k][l][m] =			\
973*d968e50bSDetlev Casanova 				&inter_cnts->ref_cnt[k][i][j][l][m].coeff;		\
974*d968e50bSDetlev Casanova 			vp9_ctx->inter_cnts.eob[i][j][k][l][m][0] =			\
975*d968e50bSDetlev Casanova 				&inter_cnts->ref_cnt[k][i][j][l][m].eob[0];		\
976*d968e50bSDetlev Casanova 			vp9_ctx->inter_cnts.eob[i][j][k][l][m][1] =			\
977*d968e50bSDetlev Casanova 				&inter_cnts->ref_cnt[k][i][j][l][m].eob[1];		\
978*d968e50bSDetlev Casanova 											\
979*d968e50bSDetlev Casanova 			vp9_ctx->intra_cnts.coeff[i][j][k][l][m] =			\
980*d968e50bSDetlev Casanova 				&intra_cnts->ref_cnt[k][i][j][l][m].coeff;		\
981*d968e50bSDetlev Casanova 			vp9_ctx->intra_cnts.eob[i][j][k][l][m][0] =			\
982*d968e50bSDetlev Casanova 				&intra_cnts->ref_cnt[k][i][j][l][m].eob[0];		\
983*d968e50bSDetlev Casanova 			vp9_ctx->intra_cnts.eob[i][j][k][l][m][1] =			\
984*d968e50bSDetlev Casanova 				&intra_cnts->ref_cnt[k][i][j][l][m].eob[1];		\
985*d968e50bSDetlev Casanova 		}									\
986*d968e50bSDetlev Casanova 	} while (0)
987*d968e50bSDetlev Casanova 
988*d968e50bSDetlev Casanova 	for (i = 0; i < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff); ++i)
989*d968e50bSDetlev Casanova 		for (j = 0; j < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0]); ++j)
990*d968e50bSDetlev Casanova 			for (k = 0; k < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0]); ++k)
991*d968e50bSDetlev Casanova 				for (l = 0; l < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0][0]); ++l)
992*d968e50bSDetlev Casanova 					INNERMOST_LOOP;
993*d968e50bSDetlev Casanova #undef INNERMOST_LOOP
994*d968e50bSDetlev Casanova }
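
/*
 * Minimal illustrative sketch of the MV class counter copy described in
 * rkvdec_init_v4l2_vp9_count_tbl(): the hardware rows are 12 entries wide
 * ("u32 classes[2][11 + 1]") while the vp9 library expects 11-entry rows,
 * so only the first 11 counters of each row are copied, exactly as
 * rkvdec_vp9_done() does with its local classes[2][11] array. The helper
 * name is hypothetical and nothing in the driver calls it.
 */
static inline void rkvdec_vp9_copy_mv_class_counts(u32 dst[2][11],
						    u32 src[2][12])
{
	unsigned int i;

	for (i = 0; i < 2; i++)
		memcpy(dst[i], src[i], sizeof(dst[i]));
}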
995*d968e50bSDetlev Casanova 
996*d968e50bSDetlev Casanova static int rkvdec_vp9_start(struct rkvdec_ctx *ctx)
997*d968e50bSDetlev Casanova {
998*d968e50bSDetlev Casanova 	struct rkvdec_dev *rkvdec = ctx->dev;
999*d968e50bSDetlev Casanova 	struct rkvdec_vp9_priv_tbl *priv_tbl;
1000*d968e50bSDetlev Casanova 	struct rkvdec_vp9_ctx *vp9_ctx;
1001*d968e50bSDetlev Casanova 	unsigned char *count_tbl;
1002*d968e50bSDetlev Casanova 	int ret;
1003*d968e50bSDetlev Casanova 
1004*d968e50bSDetlev Casanova 	vp9_ctx = kzalloc(sizeof(*vp9_ctx), GFP_KERNEL);
1005*d968e50bSDetlev Casanova 	if (!vp9_ctx)
1006*d968e50bSDetlev Casanova 		return -ENOMEM;
1007*d968e50bSDetlev Casanova 
1008*d968e50bSDetlev Casanova 	ctx->priv = vp9_ctx;
1009*d968e50bSDetlev Casanova 
1010*d968e50bSDetlev Casanova 	BUILD_BUG_ON(sizeof(priv_tbl->probs) % 16); /* probs table size must be a multiple of 128 bits */
1011*d968e50bSDetlev Casanova 	priv_tbl = dma_alloc_coherent(rkvdec->dev, sizeof(*priv_tbl),
1012*d968e50bSDetlev Casanova 				      &vp9_ctx->priv_tbl.dma, GFP_KERNEL);
1013*d968e50bSDetlev Casanova 	if (!priv_tbl) {
1014*d968e50bSDetlev Casanova 		ret = -ENOMEM;
1015*d968e50bSDetlev Casanova 		goto err_free_ctx;
1016*d968e50bSDetlev Casanova 	}
1017*d968e50bSDetlev Casanova 
1018*d968e50bSDetlev Casanova 	vp9_ctx->priv_tbl.size = sizeof(*priv_tbl);
1019*d968e50bSDetlev Casanova 	vp9_ctx->priv_tbl.cpu = priv_tbl;
1020*d968e50bSDetlev Casanova 
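	/*
	 * The count table is written by the hardware with the symbol counts
	 * gathered while decoding a frame; rkvdec_init_v4l2_vp9_count_tbl()
	 * below points the v4l2-vp9 count structures into it so the counts
	 * can be fed back to the probability adaptation helpers in
	 * rkvdec_vp9_done().
	 */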
1021*d968e50bSDetlev Casanova 	count_tbl = dma_alloc_coherent(rkvdec->dev, RKVDEC_VP9_COUNT_SIZE,
1022*d968e50bSDetlev Casanova 				       &vp9_ctx->count_tbl.dma, GFP_KERNEL);
1023*d968e50bSDetlev Casanova 	if (!count_tbl) {
1024*d968e50bSDetlev Casanova 		ret = -ENOMEM;
1025*d968e50bSDetlev Casanova 		goto err_free_priv_tbl;
1026*d968e50bSDetlev Casanova 	}
1027*d968e50bSDetlev Casanova 
1028*d968e50bSDetlev Casanova 	vp9_ctx->count_tbl.size = RKVDEC_VP9_COUNT_SIZE;
1029*d968e50bSDetlev Casanova 	vp9_ctx->count_tbl.cpu = count_tbl;
1030*d968e50bSDetlev Casanova 	rkvdec_init_v4l2_vp9_count_tbl(ctx);
1031*d968e50bSDetlev Casanova 
1032*d968e50bSDetlev Casanova 	return 0;
1033*d968e50bSDetlev Casanova 
1034*d968e50bSDetlev Casanova err_free_priv_tbl:
1035*d968e50bSDetlev Casanova 	dma_free_coherent(rkvdec->dev, vp9_ctx->priv_tbl.size,
1036*d968e50bSDetlev Casanova 			  vp9_ctx->priv_tbl.cpu, vp9_ctx->priv_tbl.dma);
1037*d968e50bSDetlev Casanova 
1038*d968e50bSDetlev Casanova err_free_ctx:
1039*d968e50bSDetlev Casanova 	kfree(vp9_ctx);
1040*d968e50bSDetlev Casanova 	return ret;
1041*d968e50bSDetlev Casanova }
1042*d968e50bSDetlev Casanova 
1043*d968e50bSDetlev Casanova static void rkvdec_vp9_stop(struct rkvdec_ctx *ctx)
1044*d968e50bSDetlev Casanova {
1045*d968e50bSDetlev Casanova 	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
1046*d968e50bSDetlev Casanova 	struct rkvdec_dev *rkvdec = ctx->dev;
1047*d968e50bSDetlev Casanova 
1048*d968e50bSDetlev Casanova 	dma_free_coherent(rkvdec->dev, vp9_ctx->count_tbl.size,
1049*d968e50bSDetlev Casanova 			  vp9_ctx->count_tbl.cpu, vp9_ctx->count_tbl.dma);
1050*d968e50bSDetlev Casanova 	dma_free_coherent(rkvdec->dev, vp9_ctx->priv_tbl.size,
1051*d968e50bSDetlev Casanova 			  vp9_ctx->priv_tbl.cpu, vp9_ctx->priv_tbl.dma);
1052*d968e50bSDetlev Casanova 	kfree(vp9_ctx);
1053*d968e50bSDetlev Casanova }
1054*d968e50bSDetlev Casanova 
1055*d968e50bSDetlev Casanova static int rkvdec_vp9_adjust_fmt(struct rkvdec_ctx *ctx,
1056*d968e50bSDetlev Casanova 				 struct v4l2_format *f)
1057*d968e50bSDetlev Casanova {
1058*d968e50bSDetlev Casanova 	struct v4l2_pix_format_mplane *fmt = &f->fmt.pix_mp;
1059*d968e50bSDetlev Casanova 
1060*d968e50bSDetlev Casanova 	fmt->num_planes = 1;
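	/*
	 * If userspace did not provide a size for the coded buffer, default
	 * to 2 bytes per pixel, which should comfortably fit a compressed
	 * VP9 frame.
	 */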
1061*d968e50bSDetlev Casanova 	if (!fmt->plane_fmt[0].sizeimage)
1062*d968e50bSDetlev Casanova 		fmt->plane_fmt[0].sizeimage = fmt->width * fmt->height * 2;
1063*d968e50bSDetlev Casanova 	return 0;
1064*d968e50bSDetlev Casanova }
1065*d968e50bSDetlev Casanova 
1066*d968e50bSDetlev Casanova const struct rkvdec_coded_fmt_ops rkvdec_vp9_fmt_ops = {
1067*d968e50bSDetlev Casanova 	.adjust_fmt = rkvdec_vp9_adjust_fmt,
1068*d968e50bSDetlev Casanova 	.start = rkvdec_vp9_start,
1069*d968e50bSDetlev Casanova 	.stop = rkvdec_vp9_stop,
1070*d968e50bSDetlev Casanova 	.run = rkvdec_vp9_run,
1071*d968e50bSDetlev Casanova 	.done = rkvdec_vp9_done,
1072*d968e50bSDetlev Casanova };
1073