// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2022-2024 Qualcomm Innovation Center, Inc. All rights reserved.
 */

#include "iris_instance.h"
#include "iris_vpu_buffer.h"

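/*
 * Decoder "bin" buffer sizing for H.264, HEVC and VP9. The helpers below
 * derive a worst-case YUV frame size, split it into CABAC header and
 * residual portions using the per-codec ratio macros, and round each
 * portion up to DMA_ALIGNMENT per VPP pipe.
 */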
static u32 size_h264d_hw_bin_buffer(u32 frame_width, u32 frame_height, u32 num_vpp_pipes)
{
	u32 size_yuv, size_bin_hdr, size_bin_res;

	size_yuv = ((frame_width * frame_height) <= BIN_BUFFER_THRESHOLD) ?
			((BIN_BUFFER_THRESHOLD * 3) >> 1) :
			((frame_width * frame_height * 3) >> 1);
	size_bin_hdr = size_yuv * H264_CABAC_HDR_RATIO_HD_TOT;
	size_bin_res = size_yuv * H264_CABAC_RES_RATIO_HD_TOT;
	size_bin_hdr = ALIGN(size_bin_hdr / num_vpp_pipes,
			     DMA_ALIGNMENT) * num_vpp_pipes;
	size_bin_res = ALIGN(size_bin_res / num_vpp_pipes,
			     DMA_ALIGNMENT) * num_vpp_pipes;

	return size_bin_hdr + size_bin_res;
}

static u32 hfi_buffer_bin_h264d(u32 frame_width, u32 frame_height, u32 num_vpp_pipes)
{
	u32 n_aligned_h = ALIGN(frame_height, 16);
	u32 n_aligned_w = ALIGN(frame_width, 16);

	return size_h264d_hw_bin_buffer(n_aligned_w, n_aligned_h, num_vpp_pipes);
}

static u32 size_h265d_hw_bin_buffer(u32 frame_width, u32 frame_height, u32 num_vpp_pipes)
{
	u32 product = frame_width * frame_height;
	u32 size_yuv, size_bin_hdr, size_bin_res;

	size_yuv = (product <= BIN_BUFFER_THRESHOLD) ?
		((BIN_BUFFER_THRESHOLD * 3) >> 1) : ((product * 3) >> 1);
	size_bin_hdr = size_yuv * H265_CABAC_HDR_RATIO_HD_TOT;
	size_bin_res = size_yuv * H265_CABAC_RES_RATIO_HD_TOT;
	size_bin_hdr = ALIGN(size_bin_hdr / num_vpp_pipes, DMA_ALIGNMENT) * num_vpp_pipes;
	size_bin_res = ALIGN(size_bin_res / num_vpp_pipes, DMA_ALIGNMENT) * num_vpp_pipes;

	return size_bin_hdr + size_bin_res;
}

static u32 hfi_buffer_bin_vp9d(u32 frame_width, u32 frame_height, u32 num_vpp_pipes)
{
	u32 _size_yuv = ALIGN(frame_width, 16) * ALIGN(frame_height, 16) * 3 / 2;
	u32 _size = ALIGN(((max_t(u32, _size_yuv, ((BIN_BUFFER_THRESHOLD * 3) >> 1)) *
			VPX_DECODER_FRAME_BIN_HDR_BUDGET / VPX_DECODER_FRAME_BIN_DENOMINATOR *
			VPX_DECODER_FRAME_CONCURENCY_LVL) / num_vpp_pipes), DMA_ALIGNMENT) +
			ALIGN(((max_t(u32, _size_yuv, ((BIN_BUFFER_THRESHOLD * 3) >> 1)) *
			VPX_DECODER_FRAME_BIN_RES_BUDGET / VPX_DECODER_FRAME_BIN_DENOMINATOR *
			VPX_DECODER_FRAME_CONCURENCY_LVL) / num_vpp_pipes), DMA_ALIGNMENT);

	return _size * num_vpp_pipes;
}

static u32 hfi_buffer_bin_h265d(u32 frame_width, u32 frame_height, u32 num_vpp_pipes)
{
	u32 n_aligned_w = ALIGN(frame_width, 16);
	u32 n_aligned_h = ALIGN(frame_height, 16);

	return size_h265d_hw_bin_buffer(n_aligned_w, n_aligned_h, num_vpp_pipes);
}

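/*
 * Co-located motion vector ("comv") buffer sizing for H.264 and HEVC.
 * The per-picture size is multiplied by the caller-supplied buffer count
 * (VIDEO_MAX_FRAME in this driver) with a 512-byte pad on top.
 */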
static u32 hfi_buffer_comv_h264d(u32 frame_width, u32 frame_height, u32 _comv_bufcount)
{
	u32 frame_height_in_mbs = DIV_ROUND_UP(frame_height, 16);
	u32 frame_width_in_mbs = DIV_ROUND_UP(frame_width, 16);
	u32 col_zero_aligned_width = (frame_width_in_mbs << 2);
	u32 col_mv_aligned_width = (frame_width_in_mbs << 7);
	u32 col_zero_size, size_colloc;

	col_mv_aligned_width = ALIGN(col_mv_aligned_width, 16);
	col_zero_aligned_width = ALIGN(col_zero_aligned_width, 16);
	col_zero_size = col_zero_aligned_width *
			((frame_height_in_mbs + 1) >> 1);
	col_zero_size = ALIGN(col_zero_size, 64);
	col_zero_size <<= 1;
	col_zero_size = ALIGN(col_zero_size, 512);
	size_colloc = col_mv_aligned_width * ((frame_height_in_mbs + 1) >> 1);
	size_colloc = ALIGN(size_colloc, 64);
	size_colloc <<= 1;
	size_colloc = ALIGN(size_colloc, 512);
	size_colloc += (col_zero_size + SIZE_H264D_BUFTAB_T * 2);

	return (size_colloc * (_comv_bufcount)) + 512;
}

static u32 hfi_buffer_comv_h265d(u32 frame_width, u32 frame_height, u32 _comv_bufcount)
{
	u32 frame_height_in_mbs = (frame_height + 15) >> 4;
	u32 frame_width_in_mbs = (frame_width + 15) >> 4;
	u32 _size;

	_size = ALIGN(((frame_width_in_mbs * frame_height_in_mbs) << 8), 512);

	return (_size * (_comv_bufcount)) + 512;
}

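/*
 * Per-slice command buffers for the bitstream engine (BSE). The command
 * count scales with frame size and is clamped to the slice limit the
 * hardware is provisioned for (H264D_MAX_SLICE / H265D_MAX_SLICE).
 */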
static u32 size_h264d_bse_cmd_buf(u32 frame_height)
{
	u32 height = ALIGN(frame_height, 32);

	return min_t(u32, (DIV_ROUND_UP(height, 16) * 48), H264D_MAX_SLICE) *
		SIZE_H264D_BSE_CMD_PER_BUF;
}

static u32 size_h265d_bse_cmd_buf(u32 frame_width, u32 frame_height)
{
	u32 _size = ALIGN(((ALIGN(frame_width, LCU_MAX_SIZE_PELS) / LCU_MIN_SIZE_PELS) *
			   (ALIGN(frame_height, LCU_MAX_SIZE_PELS) / LCU_MIN_SIZE_PELS)) *
			  NUM_HW_PIC_BUF, DMA_ALIGNMENT);
	_size = min_t(u32, _size, H265D_MAX_SLICE + 1);
	_size = 2 * _size * SIZE_H265D_BSE_CMD_PER_BUF;

	return _size;
}

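/*
 * Persist buffers hold per-session state kept across frames: SLIST
 * buffers, display/frame info, SEI user data and, for HEVC, optional
 * Dolby RPU metadata when rpu_enabled is set.
 */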
static u32 hfi_buffer_persist_h265d(u32 rpu_enabled)
{
	return ALIGN((SIZE_SLIST_BUF_H265 * NUM_SLIST_BUF_H265 +
		      H265_NUM_FRM_INFO * H265_DISPLAY_BUF_SIZE +
		      H265_NUM_TILE * sizeof(u32) +
		      NUM_HW_PIC_BUF * SIZE_SEI_USERDATA +
		      rpu_enabled * NUM_HW_PIC_BUF * SIZE_DOLBY_RPU_METADATA),
		     DMA_ALIGNMENT);
}

static inline
u32 hfi_iris3_vp9d_comv_size(void)
{
	return (((8192 + 63) >> 6) * ((4320 + 63) >> 6) * 8 * 8 * 2 * 8);
}

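/*
 * The VP9 persist buffer carries the probability tables, superframe and
 * uncompressed-header scratch, tile offsets and per-frame info; the comv
 * portion above is sized for the maximum supported 8192x4320 resolution.
 */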
static u32 hfi_buffer_persist_vp9d(void)
{
	return ALIGN(VP9_NUM_PROBABILITY_TABLE_BUF * VP9_PROB_TABLE_SIZE, DMA_ALIGNMENT) +
		ALIGN(hfi_iris3_vp9d_comv_size(), DMA_ALIGNMENT) +
		ALIGN(MAX_SUPERFRAME_HEADER_LEN, DMA_ALIGNMENT) +
		ALIGN(VP9_UDC_HEADER_BUF_SIZE, DMA_ALIGNMENT) +
		ALIGN(VP9_NUM_FRAME_INFO_BUF * CCE_TILE_OFFSET_SIZE, DMA_ALIGNMENT) +
		ALIGN(VP9_NUM_FRAME_INFO_BUF * VP9_FRAME_INFO_BUF_SIZE, DMA_ALIGNMENT) +
		HDR10_HIST_EXTRADATA_SIZE;
}

static u32 size_h264d_vpp_cmd_buf(u32 frame_height)
{
	u32 size, height = ALIGN(frame_height, 32);

	size = min_t(u32, (DIV_ROUND_UP(height, 16) * 48), H264D_MAX_SLICE) *
			SIZE_H264D_VPP_CMD_PER_BUF;

	return size > VPP_CMD_MAX_SIZE ? VPP_CMD_MAX_SIZE : size;
}

static u32 hfi_buffer_persist_h264d(void)
{
	return ALIGN(SIZE_SLIST_BUF_H264 * NUM_SLIST_BUF_H264 +
		    H264_DISPLAY_BUF_SIZE * H264_NUM_FRM_INFO +
		    NUM_HW_PIC_BUF * SIZE_SEI_USERDATA,
		    DMA_ALIGNMENT);
}

static u32 hfi_buffer_non_comv_h264d(u32 frame_width, u32 frame_height, u32 num_vpp_pipes)
{
	u32 size_bse = size_h264d_bse_cmd_buf(frame_height);
	u32 size_vpp = size_h264d_vpp_cmd_buf(frame_height);
	u32 size = ALIGN(size_bse, DMA_ALIGNMENT) +
		ALIGN(size_vpp, DMA_ALIGNMENT) +
		ALIGN(SIZE_HW_PIC(SIZE_H264D_HW_PIC_T), DMA_ALIGNMENT);

	return ALIGN(size, DMA_ALIGNMENT);
}

static u32 size_h265d_vpp_cmd_buf(u32 frame_width, u32 frame_height)
{
	u32 _size = ALIGN(((ALIGN(frame_width, LCU_MAX_SIZE_PELS) / LCU_MIN_SIZE_PELS) *
			   (ALIGN(frame_height, LCU_MAX_SIZE_PELS) / LCU_MIN_SIZE_PELS)) *
			  NUM_HW_PIC_BUF, DMA_ALIGNMENT);
	_size = min_t(u32, _size, H265D_MAX_SLICE + 1);
	_size = ALIGN(_size, 4);
	_size = 2 * _size * SIZE_H265D_VPP_CMD_PER_BUF;
	if (_size > VPP_CMD_MAX_SIZE)
		_size = VPP_CMD_MAX_SIZE;

	return _size;
}

static u32 hfi_buffer_non_comv_h265d(u32 frame_width, u32 frame_height, u32 num_vpp_pipes)
{
	u32 _size_bse = size_h265d_bse_cmd_buf(frame_width, frame_height);
	u32 _size_vpp = size_h265d_vpp_cmd_buf(frame_width, frame_height);
	u32 _size = ALIGN(_size_bse, DMA_ALIGNMENT) +
		ALIGN(_size_vpp, DMA_ALIGNMENT) +
		ALIGN(NUM_HW_PIC_BUF * 20 * 22 * 4, DMA_ALIGNMENT) +
		ALIGN(2 * sizeof(u16) *
		(ALIGN(frame_width, LCU_MAX_SIZE_PELS) / LCU_MIN_SIZE_PELS) *
		(ALIGN(frame_height, LCU_MAX_SIZE_PELS) / LCU_MIN_SIZE_PELS), DMA_ALIGNMENT) +
		ALIGN(SIZE_HW_PIC(SIZE_H265D_HW_PIC_T), DMA_ALIGNMENT) +
		HDR10_HIST_EXTRADATA_SIZE;

	return ALIGN(_size, DMA_ALIGNMENT);
}

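/*
 * VPSS line buffers are only needed when the decoded output is routed
 * through the video post-processor (split/OPB mode); callers add this
 * on top of the decoder line buffers when is_opb is true.
 */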
static u32 size_vpss_lb(u32 frame_width, u32 frame_height)
{
	u32 opb_lb_wr_llb_y_buffer_size, opb_lb_wr_llb_uv_buffer_size;
	u32 opb_wr_top_line_chroma_buffer_size;
	u32 opb_wr_top_line_luma_buffer_size;
	u32 macrotiling_size = 32;

	opb_wr_top_line_luma_buffer_size =
		ALIGN(frame_width, macrotiling_size) / macrotiling_size * 256;
	opb_wr_top_line_luma_buffer_size =
		ALIGN(opb_wr_top_line_luma_buffer_size, DMA_ALIGNMENT) +
		(MAX_TILE_COLUMNS - 1) * 256;
	opb_wr_top_line_luma_buffer_size =
		max_t(u32, opb_wr_top_line_luma_buffer_size, (32 * ALIGN(frame_height, 8)));
	opb_wr_top_line_chroma_buffer_size = opb_wr_top_line_luma_buffer_size;
	opb_lb_wr_llb_uv_buffer_size =
		ALIGN((ALIGN(frame_height, 8) / (4 / 2)) * 64, 32);
	opb_lb_wr_llb_y_buffer_size =
		ALIGN((ALIGN(frame_height, 8) / (4 / 2)) * 64, 32);
	return opb_wr_top_line_luma_buffer_size +
		opb_wr_top_line_chroma_buffer_size +
		opb_lb_wr_llb_uv_buffer_size +
		opb_lb_wr_llb_y_buffer_size;
}

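/*
 * HEVC line-buffer components. The FE/SE/PE/VSP "top" buffers scale with
 * frame width and the "left" buffers with frame height, expressed in LCU
 * units between LCU_MIN_SIZE_PELS and LCU_MAX_SIZE_PELS.
 */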
static inline
u32 size_h265d_lb_fe_top_data(u32 frame_width, u32 frame_height)
{
	return MAX_FE_NBR_DATA_LUMA_LINE_BUFFER_SIZE *
		(ALIGN(frame_width, 64) + 8) * 2;
}

static inline
u32 size_h265d_lb_fe_top_ctrl(u32 frame_width, u32 frame_height)
{
	return MAX_FE_NBR_CTRL_LCU64_LINE_BUFFER_SIZE *
		(ALIGN(frame_width, LCU_MAX_SIZE_PELS) / LCU_MIN_SIZE_PELS);
}

static inline
u32 size_h265d_lb_fe_left_ctrl(u32 frame_width, u32 frame_height)
{
	return MAX_FE_NBR_CTRL_LCU64_LINE_BUFFER_SIZE *
		(ALIGN(frame_height, LCU_MAX_SIZE_PELS) / LCU_MIN_SIZE_PELS);
}

static inline
u32 size_h265d_lb_se_top_ctrl(u32 frame_width, u32 frame_height)
{
	return (LCU_MAX_SIZE_PELS / 8 * (128 / 8)) * ((frame_width + 15) >> 4);
}

static inline
u32 size_h265d_lb_se_left_ctrl(u32 frame_width, u32 frame_height)
{
	return max_t(u32, ((frame_height + 16 - 1) / 8) *
		MAX_SE_NBR_CTRL_LCU16_LINE_BUFFER_SIZE,
		max_t(u32, ((frame_height + 32 - 1) / 8) *
		MAX_SE_NBR_CTRL_LCU32_LINE_BUFFER_SIZE,
		((frame_height + 64 - 1) / 8) *
		MAX_SE_NBR_CTRL_LCU64_LINE_BUFFER_SIZE));
}

static inline
u32 size_h265d_lb_pe_top_data(u32 frame_width, u32 frame_height)
{
	return MAX_PE_NBR_DATA_LCU64_LINE_BUFFER_SIZE *
		(ALIGN(frame_width, LCU_MIN_SIZE_PELS) / LCU_MIN_SIZE_PELS);
}

static inline
u32 size_h265d_lb_vsp_top(u32 frame_width, u32 frame_height)
{
	return ((frame_width + 63) >> 6) * 128;
}

static inline
u32 size_h265d_lb_vsp_left(u32 frame_width, u32 frame_height)
{
	return ((frame_height + 63) >> 6) * 128;
}

static inline
u32 size_h265d_lb_recon_dma_metadata_wr(u32 frame_width, u32 frame_height)
{
	return size_h264d_lb_recon_dma_metadata_wr(frame_height);
}

static inline
u32 size_h265d_qp(u32 frame_width, u32 frame_height)
{
	return size_h264d_qp(frame_width, frame_height);
}

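/*
 * Total HEVC line-buffer allocation: per-pipe "left" buffers are
 * multiplied by num_vpp_pipes, the shared "top" buffers are counted
 * once, and the VPSS contribution is added only in OPB (split) mode.
 */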
static inline
u32 hfi_buffer_line_h265d(u32 frame_width, u32 frame_height, bool is_opb, u32 num_vpp_pipes)
{
	u32 vpss_lb_size = 0, _size;

	_size = ALIGN(size_h265d_lb_fe_top_data(frame_width, frame_height), DMA_ALIGNMENT) +
		ALIGN(size_h265d_lb_fe_top_ctrl(frame_width, frame_height), DMA_ALIGNMENT) +
		ALIGN(size_h265d_lb_fe_left_ctrl(frame_width, frame_height),
		      DMA_ALIGNMENT) * num_vpp_pipes +
		ALIGN(size_h265d_lb_se_left_ctrl(frame_width, frame_height),
		      DMA_ALIGNMENT) * num_vpp_pipes +
		ALIGN(size_h265d_lb_se_top_ctrl(frame_width, frame_height), DMA_ALIGNMENT) +
		ALIGN(size_h265d_lb_pe_top_data(frame_width, frame_height), DMA_ALIGNMENT) +
		ALIGN(size_h265d_lb_vsp_top(frame_width, frame_height), DMA_ALIGNMENT) +
		ALIGN(size_h265d_lb_vsp_left(frame_width, frame_height),
		      DMA_ALIGNMENT) * num_vpp_pipes +
		ALIGN(size_h265d_lb_recon_dma_metadata_wr(frame_width, frame_height),
		      DMA_ALIGNMENT) * 4 +
		ALIGN(size_h265d_qp(frame_width, frame_height), DMA_ALIGNMENT);
	if (is_opb)
		vpss_lb_size = size_vpss_lb(frame_width, frame_height);

	return ALIGN((_size + vpss_lb_size), DMA_ALIGNMENT);
}

static inline
u32 size_vpxd_lb_fe_left_ctrl(u32 frame_width, u32 frame_height)
{
	return max_t(u32, ((frame_height + 15) >> 4) *
		     MAX_FE_NBR_CTRL_LCU16_LINE_BUFFER_SIZE,
		     max_t(u32, ((frame_height + 31) >> 5) *
			   MAX_FE_NBR_CTRL_LCU32_LINE_BUFFER_SIZE,
			   ((frame_height + 63) >> 6) *
			   MAX_FE_NBR_CTRL_LCU64_LINE_BUFFER_SIZE));
}

static inline
u32 size_vpxd_lb_fe_top_ctrl(u32 frame_width, u32 frame_height)
{
	return ((ALIGN(frame_width, 64) + 8) * 10 * 2);
}

static inline
u32 size_vpxd_lb_se_top_ctrl(u32 frame_width, u32 frame_height)
{
	return ((frame_width + 15) >> 4) * MAX_FE_NBR_CTRL_LCU16_LINE_BUFFER_SIZE;
}

static inline
u32 size_vpxd_lb_se_left_ctrl(u32 frame_width, u32 frame_height)
{
	return max_t(u32, ((frame_height + 15) >> 4) *
		     MAX_SE_NBR_CTRL_LCU16_LINE_BUFFER_SIZE,
		     max_t(u32, ((frame_height + 31) >> 5) *
			   MAX_SE_NBR_CTRL_LCU32_LINE_BUFFER_SIZE,
			   ((frame_height + 63) >> 6) *
			   MAX_SE_NBR_CTRL_LCU64_LINE_BUFFER_SIZE));
}

static inline
u32 size_vpxd_lb_recon_dma_metadata_wr(u32 frame_width, u32 frame_height)
{
	return ALIGN((ALIGN(frame_height, 8) / (4 / 2)) * 64,
		BUFFER_ALIGNMENT_32_BYTES);
}

static inline __maybe_unused
u32 size_mp2d_lb_fe_top_data(u32 frame_width, u32 frame_height)
{
	return ((ALIGN(frame_width, 16) + 8) * 10 * 2);
}

static inline
u32 size_vp9d_lb_fe_top_data(u32 frame_width, u32 frame_height)
{
	return (ALIGN(ALIGN(frame_width, 8), 64) + 8) * 10 * 2;
}

static inline
u32 size_vp9d_lb_pe_top_data(u32 frame_width, u32 frame_height)
{
	return ((ALIGN(ALIGN(frame_width, 8), 64) >> 6) * 176);
}

static inline
u32 size_vp9d_lb_vsp_top(u32 frame_width, u32 frame_height)
{
	return (((ALIGN(ALIGN(frame_width, 8), 64) >> 6) * 64 * 8) + 256);
}

static inline
u32 size_vp9d_qp(u32 frame_width, u32 frame_height)
{
	return size_h264d_qp(frame_width, frame_height);
}

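/*
 * VP9 line-buffer total: the generic VPX front-end/stream-engine buffers
 * (per-pipe "left" buffers times num_vpp_pipes) combined with the
 * VP9-specific prediction and VSP buffers.
 */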
static inline
u32 hfi_iris3_vp9d_lb_size(u32 frame_width, u32 frame_height, u32 num_vpp_pipes)
{
	return ALIGN(size_vpxd_lb_fe_left_ctrl(frame_width, frame_height), DMA_ALIGNMENT) *
		num_vpp_pipes +
		ALIGN(size_vpxd_lb_se_left_ctrl(frame_width, frame_height), DMA_ALIGNMENT) *
		num_vpp_pipes +
		ALIGN(size_vp9d_lb_vsp_top(frame_width, frame_height), DMA_ALIGNMENT) +
		ALIGN(size_vpxd_lb_fe_top_ctrl(frame_width, frame_height), DMA_ALIGNMENT) +
		2 * ALIGN(size_vpxd_lb_recon_dma_metadata_wr(frame_width, frame_height),
			  DMA_ALIGNMENT) +
		ALIGN(size_vpxd_lb_se_top_ctrl(frame_width, frame_height), DMA_ALIGNMENT) +
		ALIGN(size_vp9d_lb_pe_top_data(frame_width, frame_height), DMA_ALIGNMENT) +
		ALIGN(size_vp9d_lb_fe_top_data(frame_width, frame_height), DMA_ALIGNMENT) +
		ALIGN(size_vp9d_qp(frame_width, frame_height), DMA_ALIGNMENT);
}

static inline
u32 hfi_buffer_line_vp9d(u32 frame_width, u32 frame_height, u32 _yuv_bufcount_min, bool is_opb,
			 u32 num_vpp_pipes)
{
	u32 vpss_lb_size = 0;
	u32 _lb_size;

	_lb_size = hfi_iris3_vp9d_lb_size(frame_width, frame_height, num_vpp_pipes);

	if (is_opb)
		vpss_lb_size = size_vpss_lb(frame_width, frame_height);

	return _lb_size + vpss_lb_size + 4096;
}

static u32 hfi_buffer_line_h264d(u32 frame_width, u32 frame_height,
				 bool is_opb, u32 num_vpp_pipes)
{
	u32 vpss_lb_size = 0;
	u32 size;

	size = ALIGN(size_h264d_lb_fe_top_data(frame_width), DMA_ALIGNMENT) +
		ALIGN(size_h264d_lb_fe_top_ctrl(frame_width), DMA_ALIGNMENT) +
		ALIGN(size_h264d_lb_fe_left_ctrl(frame_height), DMA_ALIGNMENT) * num_vpp_pipes +
		ALIGN(size_h264d_lb_se_top_ctrl(frame_width), DMA_ALIGNMENT) +
		ALIGN(size_h264d_lb_se_left_ctrl(frame_height), DMA_ALIGNMENT) * num_vpp_pipes +
		ALIGN(size_h264d_lb_pe_top_data(frame_width), DMA_ALIGNMENT) +
		ALIGN(size_h264d_lb_vsp_top(frame_width), DMA_ALIGNMENT) +
		ALIGN(size_h264d_lb_recon_dma_metadata_wr(frame_height), DMA_ALIGNMENT) * 2 +
		ALIGN(size_h264d_qp(frame_width, frame_height), DMA_ALIGNMENT);
	size = ALIGN(size, DMA_ALIGNMENT);
	if (is_opb)
		vpss_lb_size = size_vpss_lb(frame_width, frame_height);

	return ALIGN((size + vpss_lb_size), DMA_ALIGNMENT);
}

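/*
 * Per-codec dispatch: the iris_vpu_dec_*_size() helpers below pick the
 * sizing formula from the negotiated codec and the coded (source)
 * resolution, using the VPP pipe count reported by the platform data.
 */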
static u32 iris_vpu_dec_bin_size(struct iris_inst *inst)
{
	u32 num_vpp_pipes = inst->core->iris_platform_data->num_vpp_pipe;
	struct v4l2_format *f = inst->fmt_src;
	u32 height = f->fmt.pix_mp.height;
	u32 width = f->fmt.pix_mp.width;

	if (inst->codec == V4L2_PIX_FMT_H264)
		return hfi_buffer_bin_h264d(width, height, num_vpp_pipes);
	else if (inst->codec == V4L2_PIX_FMT_HEVC)
		return hfi_buffer_bin_h265d(width, height, num_vpp_pipes);
	else if (inst->codec == V4L2_PIX_FMT_VP9)
		return hfi_buffer_bin_vp9d(width, height, num_vpp_pipes);

	return 0;
}

static u32 iris_vpu_dec_comv_size(struct iris_inst *inst)
{
	u32 num_comv = VIDEO_MAX_FRAME;
	struct v4l2_format *f = inst->fmt_src;
	u32 height = f->fmt.pix_mp.height;
	u32 width = f->fmt.pix_mp.width;

	if (inst->codec == V4L2_PIX_FMT_H264)
		return hfi_buffer_comv_h264d(width, height, num_comv);
	else if (inst->codec == V4L2_PIX_FMT_HEVC)
		return hfi_buffer_comv_h265d(width, height, num_comv);

	return 0;
}

static u32 iris_vpu_dec_persist_size(struct iris_inst *inst)
{
	if (inst->codec == V4L2_PIX_FMT_H264)
		return hfi_buffer_persist_h264d();
	else if (inst->codec == V4L2_PIX_FMT_HEVC)
		return hfi_buffer_persist_h265d(0);
	else if (inst->codec == V4L2_PIX_FMT_VP9)
		return hfi_buffer_persist_vp9d();

	return 0;
}

static u32 iris_vpu_dec_dpb_size(struct iris_inst *inst)
{
	if (iris_split_mode_enabled(inst))
		return iris_get_buffer_size(inst, BUF_DPB);
	else
		return 0;
}

static u32 iris_vpu_dec_non_comv_size(struct iris_inst *inst)
{
	u32 num_vpp_pipes = inst->core->iris_platform_data->num_vpp_pipe;
	struct v4l2_format *f = inst->fmt_src;
	u32 height = f->fmt.pix_mp.height;
	u32 width = f->fmt.pix_mp.width;

	if (inst->codec == V4L2_PIX_FMT_H264)
		return hfi_buffer_non_comv_h264d(width, height, num_vpp_pipes);
	else if (inst->codec == V4L2_PIX_FMT_HEVC)
		return hfi_buffer_non_comv_h265d(width, height, num_vpp_pipes);

	return 0;
}

static u32 iris_vpu_dec_line_size(struct iris_inst *inst)
{
	u32 num_vpp_pipes = inst->core->iris_platform_data->num_vpp_pipe;
	struct v4l2_format *f = inst->fmt_src;
	u32 height = f->fmt.pix_mp.height;
	u32 width = f->fmt.pix_mp.width;
	bool is_opb = false;
	u32 out_min_count = inst->buffers[BUF_OUTPUT].min_count;

	if (iris_split_mode_enabled(inst))
		is_opb = true;

	if (inst->codec == V4L2_PIX_FMT_H264)
		return hfi_buffer_line_h264d(width, height, is_opb, num_vpp_pipes);
	else if (inst->codec == V4L2_PIX_FMT_HEVC)
		return hfi_buffer_line_h265d(width, height, is_opb, num_vpp_pipes);
	else if (inst->codec == V4L2_PIX_FMT_VP9)
		return hfi_buffer_line_vp9d(width, height, out_min_count, is_opb,
			num_vpp_pipes);

	return 0;
}

static u32 iris_vpu_dec_scratch1_size(struct iris_inst *inst)
{
	return iris_vpu_dec_comv_size(inst) +
		iris_vpu_dec_non_comv_size(inst) +
		iris_vpu_dec_line_size(inst);
}

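/*
 * Minimum number of BUF_OUTPUT (decoded picture) buffers: before the
 * firmware reports its own minimum via the reconfig event, fall back to
 * a codec default (4, or 9 for VP9).
 */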
static int output_min_count(struct iris_inst *inst)
{
	int output_min_count = 4;

	/* fw_min_count > 0 indicates reconfig event has already arrived */
	if (inst->fw_min_count) {
		if (iris_split_mode_enabled(inst) && inst->codec == V4L2_PIX_FMT_VP9)
			return min_t(u32, 4, inst->fw_min_count);
		else
			return inst->fw_min_count;
	}

	if (inst->codec == V4L2_PIX_FMT_VP9)
		output_min_count = 9;

	return output_min_count;
}

struct iris_vpu_buf_type_handle {
	enum iris_buffer_type type;
	u32 (*handle)(struct iris_inst *inst);
};

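/*
 * Look up the sizing helper for the requested internal buffer type and
 * return the computed size; unknown types fall through with a size of 0.
 */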
int iris_vpu_buf_size(struct iris_inst *inst, enum iris_buffer_type buffer_type)
{
	const struct iris_vpu_buf_type_handle *buf_type_handle_arr;
	u32 size = 0, buf_type_handle_size, i;

	static const struct iris_vpu_buf_type_handle dec_internal_buf_type_handle[] = {
		{BUF_BIN,         iris_vpu_dec_bin_size             },
		{BUF_COMV,        iris_vpu_dec_comv_size            },
		{BUF_NON_COMV,    iris_vpu_dec_non_comv_size        },
		{BUF_LINE,        iris_vpu_dec_line_size            },
		{BUF_PERSIST,     iris_vpu_dec_persist_size         },
		{BUF_DPB,         iris_vpu_dec_dpb_size             },
		{BUF_SCRATCH_1,   iris_vpu_dec_scratch1_size        },
	};

	buf_type_handle_size = ARRAY_SIZE(dec_internal_buf_type_handle);
	buf_type_handle_arr = dec_internal_buf_type_handle;

	for (i = 0; i < buf_type_handle_size; i++) {
		if (buf_type_handle_arr[i].type == buffer_type) {
			size = buf_type_handle_arr[i].handle(inst);
			break;
		}
	}

	return size;
}

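/*
 * One buffer of each internal type is used; COMV and NON_COMV apply only
 * to H.264 and HEVC, other codecs get a count of 0 for them.
 */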
static u32 internal_buffer_count(struct iris_inst *inst,
				 enum iris_buffer_type buffer_type)
{
	if (buffer_type == BUF_BIN || buffer_type == BUF_LINE ||
	    buffer_type == BUF_PERSIST) {
		return 1;
	} else if (buffer_type == BUF_COMV || buffer_type == BUF_NON_COMV) {
		if (inst->codec == V4L2_PIX_FMT_H264 || inst->codec == V4L2_PIX_FMT_HEVC)
			return 1;
	}
	return 0;
}

static inline int iris_vpu_dpb_count(struct iris_inst *inst)
{
	if (iris_split_mode_enabled(inst)) {
		return inst->fw_min_count ?
			inst->fw_min_count : inst->buffers[BUF_OUTPUT].min_count;
	}

	return 0;
}

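/*
 * Buffer count per type: fixed minimums for INPUT and SCRATCH_1,
 * firmware- or codec-driven minimums for OUTPUT and DPB, and one buffer
 * per internal type otherwise.
 */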
int iris_vpu_buf_count(struct iris_inst *inst, enum iris_buffer_type buffer_type)
{
	switch (buffer_type) {
	case BUF_INPUT:
		return MIN_BUFFERS;
	case BUF_OUTPUT:
		return output_min_count(inst);
	case BUF_BIN:
	case BUF_COMV:
	case BUF_NON_COMV:
	case BUF_LINE:
	case BUF_PERSIST:
		return internal_buffer_count(inst, buffer_type);
	case BUF_SCRATCH_1:
		return 1; /* internal buffer count needed by firmware is 1 */
	case BUF_DPB:
		return iris_vpu_dpb_count(inst);
	default:
		return 0;
	}
}
646