xref: /linux/drivers/media/platform/qcom/iris/iris_buffer.c (revision 6dfafbd0299a60bfb5d5e277fdf100037c7ded07)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2022-2024 Qualcomm Innovation Center, Inc. All rights reserved.
4  */
5 
6 #include <media/v4l2-event.h>
7 #include <media/v4l2-mem2mem.h>
8 
9 #include "iris_buffer.h"
10 #include "iris_instance.h"
11 #include "iris_power.h"
12 #include "iris_vpu_buffer.h"
13 
14 #define PIXELS_4K 4096
15 #define MAX_WIDTH 4096
16 #define MAX_HEIGHT 2304
17 #define Y_STRIDE_ALIGN 128
18 #define UV_STRIDE_ALIGN 128
19 #define Y_SCANLINE_ALIGN 32
20 #define UV_SCANLINE_ALIGN 16
21 #define UV_SCANLINE_ALIGN_QC08C 32
22 #define META_STRIDE_ALIGNED 64
23 #define META_SCANLINE_ALIGNED 16
24 #define NUM_MBS_4K (DIV_ROUND_UP(MAX_WIDTH, 16) * DIV_ROUND_UP(MAX_HEIGHT, 16))
25 
26 /*
27  * NV12:
28  * YUV 4:2:0 image with a plane of 8 bit Y samples followed
29  * by an interleaved U/V plane containing 8 bit 2x2 subsampled
30  * colour difference samples.
31  *
32  * <-Y/UV_Stride (aligned to 128)->
33  * <------- Width ------->
34  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  ^           ^
35  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |           |
36  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  Height      |
37  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |          y_scanlines (aligned to 32)
38  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |           |
39  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |           |
40  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |           |
41  * Y Y Y Y Y Y Y Y Y Y Y Y . . . .  V           |
42  * . . . . . . . . . . . . . . . .              |
43  * . . . . . . . . . . . . . . . .              |
44  * . . . . . . . . . . . . . . . .              |
45  * . . . . . . . . . . . . . . . .              V
46  * U V U V U V U V U V U V . . . .  ^
47  * U V U V U V U V U V U V . . . .  |
48  * U V U V U V U V U V U V . . . .  |
49  * U V U V U V U V U V U V . . . .  uv_scanlines (aligned to 16)
50  * . . . . . . . . . . . . . . . .  |
51  * . . . . . . . . . . . . . . . .  V
52  * . . . . . . . . . . . . . . . .  --> Buffer size aligned to 4K
53  *
54  * y_stride : Width aligned to 128
55  * uv_stride : Width aligned to 128
56  * y_scanlines: Height aligned to 32
57  * uv_scanlines: Height/2 aligned to 16
 * Total size = align(y_stride * y_scanlines
 *          + uv_stride * uv_scanlines, 4096)
60  *
61  * Note: All the alignments are hardware requirements.
62  */
63 static u32 iris_yuv_buffer_size_nv12(struct iris_inst *inst)
64 {
65 	u32 y_plane, uv_plane, y_stride, uv_stride, y_scanlines, uv_scanlines;
66 	struct v4l2_format *f;
67 
68 	if (inst->domain == DECODER)
69 		f = inst->fmt_dst;
70 	else
71 		f = inst->fmt_src;
72 
73 	y_stride = ALIGN(f->fmt.pix_mp.width, Y_STRIDE_ALIGN);
74 	uv_stride = ALIGN(f->fmt.pix_mp.width, UV_STRIDE_ALIGN);
75 	y_scanlines = ALIGN(f->fmt.pix_mp.height, Y_SCANLINE_ALIGN);
76 	uv_scanlines = ALIGN((f->fmt.pix_mp.height + 1) >> 1, UV_SCANLINE_ALIGN);
77 	y_plane = y_stride * y_scanlines;
78 	uv_plane = uv_stride * uv_scanlines;
79 
80 	return ALIGN(y_plane + uv_plane, PIXELS_4K);
81 }
82 
83 /*
84  * QC08C:
85  * Compressed Macro-tile format for NV12.
86  * Contains 4 planes in the following order -
87  * (A) Y_Meta_Plane
88  * (B) Y_UBWC_Plane
89  * (C) UV_Meta_Plane
90  * (D) UV_UBWC_Plane
91  *
92  * Y_Meta_Plane consists of meta information to decode compressed
93  * tile data in Y_UBWC_Plane.
94  * Y_UBWC_Plane consists of Y data in compressed macro-tile format.
95  * UBWC decoder block will use the Y_Meta_Plane data together with
96  * Y_UBWC_Plane data to produce loss-less uncompressed 8 bit Y samples.
97  *
98  * UV_Meta_Plane consists of meta information to decode compressed
99  * tile data in UV_UBWC_Plane.
100  * UV_UBWC_Plane consists of UV data in compressed macro-tile format.
101  * UBWC decoder block will use UV_Meta_Plane data together with
102  * UV_UBWC_Plane data to produce loss-less uncompressed 8 bit 2x2
103  * subsampled color difference samples.
104  *
105  * Each tile in Y_UBWC_Plane/UV_UBWC_Plane is independently decodable
106  * and randomly accessible. There is no dependency between tiles.
107  *
108  * <----- y_meta_stride ----> (aligned to 64)
109  * <-------- Width ------>
110  * M M M M M M M M M M M M . .      ^           ^
111  * M M M M M M M M M M M M . .      |           |
112  * M M M M M M M M M M M M . .      Height      |
113  * M M M M M M M M M M M M . .      |         y_meta_scanlines  (aligned to 16)
114  * M M M M M M M M M M M M . .      |           |
115  * M M M M M M M M M M M M . .      |           |
116  * M M M M M M M M M M M M . .      |           |
117  * M M M M M M M M M M M M . .      V           |
118  * . . . . . . . . . . . . . .                  |
119  * . . . . . . . . . . . . . .                  |
120  * . . . . . . . . . . . . . .      -------> Buffer size aligned to 4k
121  * . . . . . . . . . . . . . .                  V
122  * <--Compressed tile y_stride---> (aligned to 128)
123  * <------- Width ------->
124  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  ^           ^
125  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |           |
126  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  Height      |
127  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |        Macro_tile y_scanlines (aligned to 32)
128  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |           |
129  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |           |
130  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  |           |
131  * Y* Y* Y* Y* Y* Y* Y* Y* . . . .  V           |
132  * . . . . . . . . . . . . . . . .              |
133  * . . . . . . . . . . . . . . . .              |
134  * . . . . . . . . . . . . . . . .  -------> Buffer size aligned to 4k
135  * . . . . . . . . . . . . . . . .              V
136  * <----- uv_meta_stride ---->  (aligned to 64)
137  * M M M M M M M M M M M M . .      ^
138  * M M M M M M M M M M M M . .      |
139  * M M M M M M M M M M M M . .      |
140  * M M M M M M M M M M M M . .      uv_meta_scanlines (aligned to 16)
141  * . . . . . . . . . . . . . .      |
142  * . . . . . . . . . . . . . .      V
143  * . . . . . . . . . . . . . .      -------> Buffer size aligned to 4k
144  * <--Compressed tile uv_stride---> (aligned to 128)
145  * U* V* U* V* U* V* U* V* . . . .  ^
146  * U* V* U* V* U* V* U* V* . . . .  |
147  * U* V* U* V* U* V* U* V* . . . .  |
148  * U* V* U* V* U* V* U* V* . . . .  uv_scanlines (aligned to 32)
149  * . . . . . . . . . . . . . . . .  |
150  * . . . . . . . . . . . . . . . .  V
151  * . . . . . . . . . . . . . . . .  -------> Buffer size aligned to 4k
152  *
153  * y_stride: width aligned to 128
154  * uv_stride: width aligned to 128
155  * y_scanlines: height aligned to 32
156  * uv_scanlines: height aligned to 32
157  * y_plane: buffer size aligned to 4096
158  * uv_plane: buffer size aligned to 4096
159  * y_meta_stride: width aligned to 64
160  * y_meta_scanlines: height aligned to 16
161  * y_meta_plane: buffer size aligned to 4096
162  * uv_meta_stride: width aligned to 64
163  * uv_meta_scanlines: height aligned to 16
164  * uv_meta_plane: buffer size aligned to 4096
165  *
166  * Total size = align( y_plane + uv_plane +
167  *           y_meta_plane + uv_meta_plane, 4096)
168  *
169  * Note: All the alignments are hardware requirements.
170  */
171 static u32 iris_yuv_buffer_size_qc08c(struct iris_inst *inst)
172 {
173 	u32 y_plane, uv_plane, y_stride, uv_stride;
174 	u32 uv_meta_stride, uv_meta_plane;
175 	u32 y_meta_stride, y_meta_plane;
176 	struct v4l2_format *f = NULL;
177 
178 	if (inst->domain == DECODER)
179 		f = inst->fmt_dst;
180 	else
181 		f = inst->fmt_src;
182 
183 	y_meta_stride = ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.width, META_STRIDE_ALIGNED >> 1),
184 			      META_STRIDE_ALIGNED);
185 	y_meta_plane = y_meta_stride * ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.height,
186 							  META_SCANLINE_ALIGNED >> 1),
187 					     META_SCANLINE_ALIGNED);
188 	y_meta_plane = ALIGN(y_meta_plane, PIXELS_4K);
189 
190 	y_stride = ALIGN(f->fmt.pix_mp.width, Y_STRIDE_ALIGN);
191 	y_plane = ALIGN(y_stride * ALIGN(f->fmt.pix_mp.height, Y_SCANLINE_ALIGN), PIXELS_4K);
192 
193 	uv_meta_stride = ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.width / 2, META_STRIDE_ALIGNED >> 2),
194 			       META_STRIDE_ALIGNED);
195 	uv_meta_plane = uv_meta_stride * ALIGN(DIV_ROUND_UP(f->fmt.pix_mp.height / 2,
196 							    META_SCANLINE_ALIGNED >> 1),
197 					       META_SCANLINE_ALIGNED);
198 	uv_meta_plane = ALIGN(uv_meta_plane, PIXELS_4K);
199 
200 	uv_stride = ALIGN(f->fmt.pix_mp.width, UV_STRIDE_ALIGN);
201 	uv_plane = ALIGN(uv_stride * ALIGN(f->fmt.pix_mp.height / 2, UV_SCANLINE_ALIGN_QC08C),
202 			 PIXELS_4K);
203 
204 	return ALIGN(y_meta_plane + y_plane + uv_meta_plane + uv_plane, PIXELS_4K);
205 }
206 
207 static u32 iris_dec_bitstream_buffer_size(struct iris_inst *inst)
208 {
209 	struct platform_inst_caps *caps = inst->core->iris_platform_data->inst_caps;
210 	u32 base_res_mbs = NUM_MBS_4K;
211 	u32 frame_size, num_mbs;
212 	u32 div_factor = 2;
213 
214 	num_mbs = iris_get_mbpf(inst);
215 	if (num_mbs > NUM_MBS_4K) {
216 		div_factor = 4;
217 		base_res_mbs = caps->max_mbpf;
218 	} else {
219 		if (inst->codec == V4L2_PIX_FMT_VP9)
220 			div_factor = 1;
221 	}
222 
223 	/*
224 	 * frame_size = YUVsize / div_factor
225 	 * where YUVsize = resolution_in_MBs * MBs_in_pixel * 3 / 2
226 	 */
227 	frame_size = base_res_mbs * (16 * 16) * 3 / 2 / div_factor;
228 
229 	return ALIGN(frame_size, PIXELS_4K);
230 }
231 
232 static u32 iris_enc_bitstream_buffer_size(struct iris_inst *inst)
233 {
234 	u32 aligned_width, aligned_height, bitstream_size, yuv_size;
235 	int bitrate_mode, frame_rc;
236 	struct v4l2_format *f;
237 
238 	f = inst->fmt_dst;
239 
240 	bitrate_mode = inst->fw_caps[BITRATE_MODE].value;
241 	frame_rc = inst->fw_caps[FRAME_RC_ENABLE].value;
242 
243 	aligned_width = ALIGN(f->fmt.pix_mp.width, 32);
244 	aligned_height = ALIGN(f->fmt.pix_mp.height, 32);
245 	bitstream_size = aligned_width * aligned_height * 3;
246 	yuv_size = (aligned_width * aligned_height * 3) >> 1;
247 	if (aligned_width * aligned_height > (4096 * 2176))
248 		/* bitstream_size = 0.25 * yuv_size; */
249 		bitstream_size = (bitstream_size >> 3);
250 	else if (aligned_width * aligned_height > (1280 * 720))
251 		/* bitstream_size = 0.5 * yuv_size; */
252 		bitstream_size = (bitstream_size >> 2);
253 
254 	if ((!frame_rc || bitrate_mode == V4L2_MPEG_VIDEO_BITRATE_MODE_CQ) &&
255 	    bitstream_size < yuv_size)
256 		bitstream_size = (bitstream_size << 1);
257 
258 	return ALIGN(bitstream_size, 4096);
259 }
260 
261 int iris_get_buffer_size(struct iris_inst *inst,
262 			 enum iris_buffer_type buffer_type)
263 {
264 	if (inst->domain == DECODER) {
265 		switch (buffer_type) {
266 		case BUF_INPUT:
267 			return iris_dec_bitstream_buffer_size(inst);
268 		case BUF_OUTPUT:
269 			if (inst->fmt_dst->fmt.pix_mp.pixelformat == V4L2_PIX_FMT_QC08C)
270 				return iris_yuv_buffer_size_qc08c(inst);
271 			else
272 				return iris_yuv_buffer_size_nv12(inst);
273 		case BUF_DPB:
274 			return iris_yuv_buffer_size_qc08c(inst);
275 		default:
276 			return 0;
277 		}
278 	} else {
279 		switch (buffer_type) {
280 		case BUF_INPUT:
281 			if (inst->fmt_src->fmt.pix_mp.pixelformat == V4L2_PIX_FMT_QC08C)
282 				return iris_yuv_buffer_size_qc08c(inst);
283 			else
284 				return iris_yuv_buffer_size_nv12(inst);
285 		case BUF_OUTPUT:
286 			return iris_enc_bitstream_buffer_size(inst);
287 		default:
288 			return 0;
289 		}
290 	}
291 }
292 
293 static void iris_fill_internal_buf_info(struct iris_inst *inst,
294 					enum iris_buffer_type buffer_type)
295 {
296 	struct iris_buffers *buffers = &inst->buffers[buffer_type];
297 
298 	buffers->size = inst->core->iris_platform_data->get_vpu_buffer_size(inst, buffer_type);
299 	buffers->min_count = iris_vpu_buf_count(inst, buffer_type);
300 }
301 
302 void iris_get_internal_buffers(struct iris_inst *inst, u32 plane)
303 {
304 	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
305 	const u32 *internal_buf_type;
306 	u32 internal_buffer_count, i;
307 
308 	if (inst->domain == DECODER) {
309 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
310 			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
311 			internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
312 			for (i = 0; i < internal_buffer_count; i++)
313 				iris_fill_internal_buf_info(inst, internal_buf_type[i]);
314 		} else {
315 			internal_buf_type = platform_data->dec_op_int_buf_tbl;
316 			internal_buffer_count = platform_data->dec_op_int_buf_tbl_size;
317 			for (i = 0; i < internal_buffer_count; i++)
318 				iris_fill_internal_buf_info(inst, internal_buf_type[i]);
319 		}
320 	} else {
321 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
322 			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
323 			internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
324 			for (i = 0; i < internal_buffer_count; i++)
325 				iris_fill_internal_buf_info(inst, internal_buf_type[i]);
326 		} else {
327 			internal_buf_type = platform_data->enc_op_int_buf_tbl;
328 			internal_buffer_count = platform_data->enc_op_int_buf_tbl_size;
329 			for (i = 0; i < internal_buffer_count; i++)
330 				iris_fill_internal_buf_info(inst, internal_buf_type[i]);
331 		}
332 	}
333 }
334 
335 static int iris_create_internal_buffer(struct iris_inst *inst,
336 				       enum iris_buffer_type buffer_type, u32 index)
337 {
338 	struct iris_buffers *buffers = &inst->buffers[buffer_type];
339 	struct iris_core *core = inst->core;
340 	struct iris_buffer *buffer;
341 
342 	if (!buffers->size)
343 		return 0;
344 
345 	buffer = kzalloc(sizeof(*buffer), GFP_KERNEL);
346 	if (!buffer)
347 		return -ENOMEM;
348 
349 	INIT_LIST_HEAD(&buffer->list);
350 	buffer->type = buffer_type;
351 	buffer->index = index;
352 	buffer->buffer_size = buffers->size;
353 	buffer->dma_attrs = DMA_ATTR_WRITE_COMBINE | DMA_ATTR_NO_KERNEL_MAPPING;
354 	list_add_tail(&buffer->list, &buffers->list);
355 
356 	buffer->kvaddr = dma_alloc_attrs(core->dev, buffer->buffer_size,
357 					 &buffer->device_addr, GFP_KERNEL, buffer->dma_attrs);
358 	if (!buffer->kvaddr)
359 		return -ENOMEM;
360 
361 	return 0;
362 }
363 
364 int iris_create_internal_buffers(struct iris_inst *inst, u32 plane)
365 {
366 	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
367 	u32 internal_buffer_count, i, j;
368 	struct iris_buffers *buffers;
369 	const u32 *internal_buf_type;
370 	int ret;
371 
372 	if (inst->domain == DECODER) {
373 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
374 			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
375 			internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
376 		} else {
377 			internal_buf_type = platform_data->dec_op_int_buf_tbl;
378 			internal_buffer_count = platform_data->dec_op_int_buf_tbl_size;
379 		}
380 	} else {
381 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
382 			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
383 			internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
384 		} else {
385 			internal_buf_type = platform_data->enc_op_int_buf_tbl;
386 			internal_buffer_count = platform_data->enc_op_int_buf_tbl_size;
387 		}
388 	}
389 
390 	for (i = 0; i < internal_buffer_count; i++) {
391 		buffers = &inst->buffers[internal_buf_type[i]];
392 		for (j = 0; j < buffers->min_count; j++) {
393 			ret = iris_create_internal_buffer(inst, internal_buf_type[i], j);
394 			if (ret)
395 				return ret;
396 		}
397 	}
398 
399 	return 0;
400 }
401 
402 int iris_queue_buffer(struct iris_inst *inst, struct iris_buffer *buf)
403 {
404 	const struct iris_hfi_command_ops *hfi_ops = inst->core->hfi_ops;
405 	int ret;
406 
407 	ret = hfi_ops->session_queue_buf(inst, buf);
408 	if (ret)
409 		return ret;
410 
411 	buf->attr &= ~BUF_ATTR_DEFERRED;
412 	buf->attr |= BUF_ATTR_QUEUED;
413 
414 	return 0;
415 }
416 
417 int iris_queue_internal_deferred_buffers(struct iris_inst *inst, enum iris_buffer_type buffer_type)
418 {
419 	struct iris_buffer *buffer, *next;
420 	struct iris_buffers *buffers;
421 	int ret = 0;
422 
423 	buffers = &inst->buffers[buffer_type];
424 	list_for_each_entry_safe(buffer, next, &buffers->list, list) {
425 		if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
426 			continue;
427 		if (buffer->attr & BUF_ATTR_QUEUED)
428 			continue;
429 
430 		if (buffer->attr & BUF_ATTR_DEFERRED) {
431 			ret = iris_queue_buffer(inst, buffer);
432 			if (ret)
433 				return ret;
434 		}
435 	}
436 
437 	return ret;
438 }
439 
440 int iris_queue_internal_buffers(struct iris_inst *inst, u32 plane)
441 {
442 	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
443 	struct iris_buffer *buffer, *next;
444 	struct iris_buffers *buffers;
445 	const u32 *internal_buf_type;
446 	u32 internal_buffer_count, i;
447 	int ret;
448 
449 	if (inst->domain == DECODER) {
450 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
451 			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
452 			internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
453 		} else {
454 			internal_buf_type = platform_data->dec_op_int_buf_tbl;
455 			internal_buffer_count = platform_data->dec_op_int_buf_tbl_size;
456 		}
457 	} else {
458 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
459 			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
460 			internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
461 		} else {
462 			internal_buf_type = platform_data->enc_op_int_buf_tbl;
463 			internal_buffer_count = platform_data->enc_op_int_buf_tbl_size;
464 		}
465 	}
466 
467 	for (i = 0; i < internal_buffer_count; i++) {
468 		buffers = &inst->buffers[internal_buf_type[i]];
469 		list_for_each_entry_safe(buffer, next, &buffers->list, list) {
470 			if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
471 				continue;
472 			if (buffer->attr & BUF_ATTR_QUEUED)
473 				continue;
474 			if (buffer->type == BUF_DPB && inst->state != IRIS_INST_STREAMING) {
475 				buffer->attr |= BUF_ATTR_DEFERRED;
476 				continue;
477 			}
478 			ret = iris_queue_buffer(inst, buffer);
479 			if (ret)
480 				return ret;
481 		}
482 	}
483 
484 	return 0;
485 }
486 
487 int iris_destroy_internal_buffer(struct iris_inst *inst, struct iris_buffer *buffer)
488 {
489 	struct iris_core *core = inst->core;
490 
491 	list_del(&buffer->list);
492 	dma_free_attrs(core->dev, buffer->buffer_size, buffer->kvaddr,
493 		       buffer->device_addr, buffer->dma_attrs);
494 	kfree(buffer);
495 
496 	return 0;
497 }
498 
499 static int iris_destroy_internal_buffers(struct iris_inst *inst, u32 plane, bool force)
500 {
501 	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
502 	struct iris_buffer *buf, *next;
503 	struct iris_buffers *buffers;
504 	const u32 *internal_buf_type;
505 	u32 i, len;
506 	int ret;
507 
508 	if (inst->domain == DECODER) {
509 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
510 			internal_buf_type = platform_data->dec_ip_int_buf_tbl;
511 			len = platform_data->dec_ip_int_buf_tbl_size;
512 		} else {
513 			internal_buf_type = platform_data->dec_op_int_buf_tbl;
514 			len = platform_data->dec_op_int_buf_tbl_size;
515 		}
516 	} else {
517 		if (V4L2_TYPE_IS_OUTPUT(plane)) {
518 			internal_buf_type = platform_data->enc_ip_int_buf_tbl;
519 			len = platform_data->enc_ip_int_buf_tbl_size;
520 		} else {
521 			internal_buf_type = platform_data->enc_op_int_buf_tbl;
522 			len = platform_data->enc_op_int_buf_tbl_size;
523 		}
524 	}
525 
526 	for (i = 0; i < len; i++) {
527 		buffers = &inst->buffers[internal_buf_type[i]];
528 		list_for_each_entry_safe(buf, next, &buffers->list, list) {
529 			/*
530 			 * during stream on, skip destroying internal(DPB) buffer
531 			 * if firmware did not return it.
532 			 * during close, destroy all buffers irrespectively.
533 			 */
534 			if (!force && buf->attr & BUF_ATTR_QUEUED)
535 				continue;
536 
537 			ret = iris_destroy_internal_buffer(inst, buf);
538 			if (ret)
539 				return ret;
540 		}
541 	}
542 
543 	if (force) {
544 		if (inst->domain == DECODER)
545 			buffers = &inst->buffers[BUF_PERSIST];
546 		else
547 			buffers = &inst->buffers[BUF_ARP];
548 
549 		list_for_each_entry_safe(buf, next, &buffers->list, list) {
550 			ret = iris_destroy_internal_buffer(inst, buf);
551 			if (ret)
552 				return ret;
553 		}
554 	}
555 
556 	return 0;
557 }
558 
/* Destroy every internal buffer for @plane, even those still queued
 * to firmware, plus the session-lifetime buffers — used at close time.
 */
int iris_destroy_all_internal_buffers(struct iris_inst *inst, u32 plane)
{
	return iris_destroy_internal_buffers(inst, plane, true);
}
563 
/* Destroy only the internal buffers for @plane that firmware has
 * already returned (not BUF_ATTR_QUEUED) — safe during streaming.
 */
int iris_destroy_dequeued_internal_buffers(struct iris_inst *inst, u32 plane)
{
	return iris_destroy_internal_buffers(inst, plane, false);
}
568 
569 static int iris_release_internal_buffers(struct iris_inst *inst,
570 					 enum iris_buffer_type buffer_type)
571 {
572 	const struct iris_hfi_command_ops *hfi_ops = inst->core->hfi_ops;
573 	struct iris_buffers *buffers = &inst->buffers[buffer_type];
574 	struct iris_buffer *buffer, *next;
575 	int ret;
576 
577 	list_for_each_entry_safe(buffer, next, &buffers->list, list) {
578 		if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
579 			continue;
580 		if (!(buffer->attr & BUF_ATTR_QUEUED))
581 			continue;
582 		ret = hfi_ops->session_release_buf(inst, buffer);
583 		if (ret)
584 			return ret;
585 		buffer->attr |= BUF_ATTR_PENDING_RELEASE;
586 	}
587 
588 	return 0;
589 }
590 
591 static int iris_release_input_internal_buffers(struct iris_inst *inst)
592 {
593 	const struct iris_platform_data *platform_data = inst->core->iris_platform_data;
594 	const u32 *internal_buf_type;
595 	u32 internal_buffer_count, i;
596 	int ret;
597 
598 	if (inst->domain == DECODER) {
599 		internal_buf_type = platform_data->dec_ip_int_buf_tbl;
600 		internal_buffer_count = platform_data->dec_ip_int_buf_tbl_size;
601 	} else {
602 		internal_buf_type = platform_data->enc_ip_int_buf_tbl;
603 		internal_buffer_count = platform_data->enc_ip_int_buf_tbl_size;
604 	}
605 
606 	for (i = 0; i < internal_buffer_count; i++) {
607 		ret = iris_release_internal_buffers(inst, internal_buf_type[i]);
608 		if (ret)
609 			return ret;
610 	}
611 
612 	return 0;
613 }
614 
615 int iris_alloc_and_queue_persist_bufs(struct iris_inst *inst, enum iris_buffer_type buffer_type)
616 {
617 	struct iris_buffers *buffers = &inst->buffers[buffer_type];
618 	struct iris_buffer *buffer, *next;
619 	int ret;
620 	u32 i;
621 
622 	if (!list_empty(&buffers->list))
623 		return 0;
624 
625 	iris_fill_internal_buf_info(inst, buffer_type);
626 
627 	for (i = 0; i < buffers->min_count; i++) {
628 		ret = iris_create_internal_buffer(inst, buffer_type, i);
629 		if (ret)
630 			return ret;
631 	}
632 
633 	list_for_each_entry_safe(buffer, next, &buffers->list, list) {
634 		if (buffer->attr & BUF_ATTR_PENDING_RELEASE)
635 			continue;
636 		if (buffer->attr & BUF_ATTR_QUEUED)
637 			continue;
638 		ret = iris_queue_buffer(inst, buffer);
639 		if (ret)
640 			return ret;
641 	}
642 
643 	return 0;
644 }
645 
646 int iris_alloc_and_queue_input_int_bufs(struct iris_inst *inst)
647 {
648 	int ret;
649 
650 	iris_get_internal_buffers(inst, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
651 
652 	ret = iris_release_input_internal_buffers(inst);
653 	if (ret)
654 		return ret;
655 
656 	ret = iris_create_internal_buffers(inst, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
657 	if (ret)
658 		return ret;
659 
660 	return iris_queue_internal_buffers(inst, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
661 }
662 
663 int iris_queue_deferred_buffers(struct iris_inst *inst, enum iris_buffer_type buf_type)
664 {
665 	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
666 	struct v4l2_m2m_buffer *buffer, *n;
667 	struct iris_buffer *buf;
668 	int ret;
669 
670 	iris_scale_power(inst);
671 
672 	if (buf_type == BUF_INPUT) {
673 		v4l2_m2m_for_each_src_buf_safe(m2m_ctx, buffer, n) {
674 			buf = to_iris_buffer(&buffer->vb);
675 			if (!(buf->attr & BUF_ATTR_DEFERRED))
676 				continue;
677 			ret = iris_queue_buffer(inst, buf);
678 			if (ret)
679 				return ret;
680 		}
681 	} else {
682 		v4l2_m2m_for_each_dst_buf_safe(m2m_ctx, buffer, n) {
683 			buf = to_iris_buffer(&buffer->vb);
684 			if (!(buf->attr & BUF_ATTR_DEFERRED))
685 				continue;
686 			ret = iris_queue_buffer(inst, buf);
687 			if (ret)
688 				return ret;
689 		}
690 	}
691 
692 	return 0;
693 }
694 
695 void iris_vb2_queue_error(struct iris_inst *inst)
696 {
697 	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
698 	struct vb2_queue *q;
699 
700 	q = v4l2_m2m_get_src_vq(m2m_ctx);
701 	vb2_queue_error(q);
702 	q = v4l2_m2m_get_dst_vq(m2m_ctx);
703 	vb2_queue_error(q);
704 }
705 
706 static struct vb2_v4l2_buffer *
707 iris_helper_find_buf(struct iris_inst *inst, u32 type, u32 idx)
708 {
709 	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
710 
711 	if (V4L2_TYPE_IS_OUTPUT(type))
712 		return v4l2_m2m_src_buf_remove_by_idx(m2m_ctx, idx);
713 	else
714 		return v4l2_m2m_dst_buf_remove_by_idx(m2m_ctx, idx);
715 }
716 
717 static void iris_get_ts_metadata(struct iris_inst *inst, u64 timestamp_ns,
718 				 struct vb2_v4l2_buffer *vbuf)
719 {
720 	u32 mask = V4L2_BUF_FLAG_TIMECODE | V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
721 	u32 i;
722 
723 	for (i = 0; i < ARRAY_SIZE(inst->tss); ++i) {
724 		if (inst->tss[i].ts_ns != timestamp_ns)
725 			continue;
726 
727 		vbuf->flags &= ~mask;
728 		vbuf->flags |= inst->tss[i].flags;
729 		vbuf->timecode = inst->tss[i].tc;
730 		return;
731 	}
732 
733 	vbuf->flags &= ~mask;
734 	vbuf->flags |= inst->tss[inst->metadata_idx].flags;
735 	vbuf->timecode = inst->tss[inst->metadata_idx].tc;
736 }
737 
/**
 * iris_vb2_buffer_done() - hand a firmware-completed buffer back to vb2.
 * @inst: instance the buffer belongs to
 * @buf: driver buffer that firmware has finished with
 *
 * Maps @buf back to its vb2 buffer, propagates flags, payload, sequence
 * number and timestamp, raises the EOS event on the LAST buffer and
 * completes the vb2 buffer. Internal buffer types (e.g. DPB) are not
 * owned by vb2 and are silently ignored.
 *
 * Return: 0 on success (or for internal buffers), -EINVAL when the
 * matching vb2 buffer cannot be found on the m2m ready queue.
 */
int iris_vb2_buffer_done(struct iris_inst *inst, struct iris_buffer *buf)
{
	struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
	struct vb2_v4l2_buffer *vbuf;
	struct vb2_buffer *vb2;
	u32 type, state;

	/* Only client-visible queues map to vb2 buffer types. */
	switch (buf->type) {
	case BUF_INPUT:
		type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
		break;
	case BUF_OUTPUT:
		type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
		break;
	default:
		return 0; /* Internal DPB Buffers */
	}

	/* Removes the buffer from the m2m ready queue. */
	vbuf = iris_helper_find_buf(inst, type, buf->index);
	if (!vbuf)
		return -EINVAL;

	vb2 = &vbuf->vb2_buf;

	vbuf->flags |= buf->flags;

	/* Error buffers carry no payload; complete them immediately. */
	if (buf->flags & V4L2_BUF_FLAG_ERROR) {
		state = VB2_BUF_STATE_ERROR;
		vb2_set_plane_payload(vb2, 0, 0);
		vb2->timestamp = 0;
		v4l2_m2m_buf_done(vbuf, state);
		return 0;
	}

	if (V4L2_TYPE_IS_CAPTURE(type)) {
		vb2_set_plane_payload(vb2, 0, buf->data_size);
		vbuf->sequence = inst->sequence_cap++;
		/* Restore timecode/flags saved for this timestamp. */
		iris_get_ts_metadata(inst, buf->timestamp, vbuf);
	} else {
		vbuf->sequence = inst->sequence_out++;
	}

	/* LAST buffer: signal EOS once and mark the m2m context stopped. */
	if (vbuf->flags & V4L2_BUF_FLAG_LAST) {
		if (!v4l2_m2m_has_stopped(m2m_ctx)) {
			const struct v4l2_event ev = { .type = V4L2_EVENT_EOS };

			v4l2_event_queue_fh(&inst->fh, &ev);
			v4l2_m2m_mark_stopped(m2m_ctx);
		}
		inst->last_buffer_dequeued = true;
	}

	state = VB2_BUF_STATE_DONE;
	vb2->timestamp = buf->timestamp;
	v4l2_m2m_buf_done(vbuf, state);

	return 0;
}
796